Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/logs/pitr-physical.log WARNING: version difference between client (1.32) and server (1.29) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.32) and server (1.29) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.32) and server (1.29) exceeds the supported minor version skew of +/-1 + main + create_infra pitr-physical-12141 + local ns=pitr-physical-12141 + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.SEGUqpQYba ++ mktemp + local LAST_ERR=/tmp/tmp.4pPorOgemQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SEGUqpQYba customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.4pPorOgemQ + rm /tmp/tmp.SEGUqpQYba /tmp/tmp.4pPorOgemQ + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.7mImzZRdnZ ++ mktemp + local LAST_ERR=/tmp/tmp.qO7JswyE5l + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7mImzZRdnZ + cat /tmp/tmp.qO7JswyE5l + rm /tmp/tmp.7mImzZRdnZ /tmp/tmp.qO7JswyE5l + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type 
"perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.NRIklJXUA9 ++ mktemp + local LAST_ERR=/tmp/tmp.Nnotl57hVy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.NRIklJXUA9 + cat /tmp/tmp.Nnotl57hVy + rm /tmp/tmp.NRIklJXUA9 /tmp/tmp.Nnotl57hVy + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.1emDwd9PVj ++ mktemp + local LAST_ERR=/tmp/tmp.jYMv0pmxhS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1emDwd9PVj + cat /tmp/tmp.jYMv0pmxhS + rm /tmp/tmp.1emDwd9PVj /tmp/tmp.jYMv0pmxhS + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.1p37j3hr1r ++ mktemp + local LAST_ERR=/tmp/tmp.bifZiCE63h + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1p37j3hr1r clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.bifZiCE63h + rm /tmp/tmp.1p37j3hr1r /tmp/tmp.bifZiCE63h + return 0 + check_crd_for_deletion PR-1872-66b64516 + local git_tag=PR-1872-66b64516 ++ yq eval .metadata.name ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1872-66b64516/deploy/crd.yaml ++ /usr/bin/sed ':a;N;$!ba;s/\n/ /g' ++ /usr/bin/sed s/---//g + for crd_name in '$(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '\''.metadata.name'\'' | $sed '\''s/---//g'\'' | $sed '\'':a;N;$!ba;s/\n/ /g'\'')' ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IXXkduooZi +++ mktemp ++ local LAST_ERR=/tmp/tmp.u9KFhIPLvR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.IXXkduooZi ++ cat /tmp/tmp.u9KFhIPLvR Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in '$(seq 0 2)' ++ set +e 
++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.IXXkduooZi ++ cat /tmp/tmp.u9KFhIPLvR Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.IXXkduooZi ++ cat /tmp/tmp.u9KFhIPLvR Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.IXXkduooZi ++ cat /tmp/tmp.u9KFhIPLvR Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.IXXkduooZi /tmp/tmp.u9KFhIPLvR ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ helm list --all-namespaces --filter chaos-mesh ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get MutatingWebhookConfiguration + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl api-resources ++ grep chaos-mesh ++ grep chaos-mesh.org ++ awk '{print $1}' ++ kubectl get crd + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get clusterrolebinding + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found + xargs kubectl delete ns + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' ++ mktemp + awk '{print$1}' + kubectl_bin get ns + local LAST_OUT=/tmp/tmp.F0hyu07hnz ++ mktemp + local LAST_OUT=/tmp/tmp.mhVPJQQ7yU ++ mktemp + local LAST_ERR=/tmp/tmp.pnRFNKpMse + local exit_status=0 + local timeout=4 ++ mktemp ++ seq 0 2 + 
local LAST_ERR=/tmp/tmp.IX2iHCturH + local exit_status=0 + local timeout=4 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace psmdb-operator --ignore-not-found ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mhVPJQQ7yU + cat /tmp/tmp.IX2iHCturH + rm /tmp/tmp.mhVPJQQ7yU /tmp/tmp.IX2iHCturH + return 0 namespace "gke-managed-cim" deleted namespace "gke-managed-system" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted namespace "pitr-physical-30959" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.F0hyu07hnz namespace "psmdb-operator" deleted + cat /tmp/tmp.pnRFNKpMse + rm /tmp/tmp.F0hyu07hnz /tmp/tmp.pnRFNKpMse + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.10vKbkFjBU ++ mktemp + local LAST_ERR=/tmp/tmp.t55vRzrorC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.10vKbkFjBU + cat /tmp/tmp.t55vRzrorC + rm /tmp/tmp.10vKbkFjBU /tmp/tmp.t55vRzrorC + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.lZGT4cBW74 ++ mktemp + local LAST_ERR=/tmp/tmp.8TwxckQgok + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lZGT4cBW74 namespace/psmdb-operator created + cat /tmp/tmp.8TwxckQgok + rm /tmp/tmp.lZGT4cBW74 /tmp/tmp.8TwxckQgok + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.ecihWgTlql +++ mktemp ++ local LAST_ERR=/tmp/tmp.adNu2VBsBg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ecihWgTlql ++ cat /tmp/tmp.adNu2VBsBg ++ rm /tmp/tmp.ecihWgTlql /tmp/tmp.adNu2VBsBg ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1872-66b64516-1-cluster6 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.eiyglgTGZ9 ++ mktemp + local LAST_ERR=/tmp/tmp.NWIhcd3wqH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1872-66b64516-1-cluster6 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eiyglgTGZ9 Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1872-66b64516-1-cluster6" modified. 
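Note on the trace format: nearly every kubectl call above runs through the test suite's kubectl_bin helper, which is why each command is wrapped in the same mktemp / LAST_OUT / LAST_ERR / seq 0 2 boilerplate. Reconstructed from this trace, the wrapper appears to retry a failing kubectl up to three times with an increasing back-off (the sleep 0 / sleep 4 / sleep 8 visible in the crd/null lookup above) while capturing stdout and stderr to temp files. A minimal sketch of that behavior follows; it assumes the real helper in e2e-tests/functions differs only in detail:

# Sketch (assumption): kubectl_bin retry wrapper as inferred from this trace,
# not the verbatim helper from the repository.
kubectl_bin() {
	local LAST_OUT LAST_ERR exit_status=0 timeout=4 i
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	for i in $(seq 0 2); do
		set +e
		kubectl "$@" >"${LAST_OUT}" 2>"${LAST_ERR}"
		exit_status=$?
		set -e
		# The trace shows an extra '-a -n 1' guard here; on success the test
		# is false and the loop breaks immediately.
		if [ "${exit_status}" != 0 ]; then
			cat "${LAST_OUT}"
			cat "${LAST_ERR}" >&2
			sleep $((timeout * i))   # matches the sleep 0 / sleep 4 / sleep 8 above
		else
			break
		fi
	done
	cat "${LAST_OUT}"
	cat "${LAST_ERR}" >&2
	rm "${LAST_OUT}" "${LAST_ERR}"
	return ${exit_status}
}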
+ cat /tmp/tmp.NWIhcd3wqH + rm /tmp/tmp.eiyglgTGZ9 /tmp/tmp.NWIhcd3wqH + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/pitr-physical/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.kfTHDP4HYI ++ mktemp + local LAST_ERR=/tmp/tmp.hSkGQUlC1f + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kfTHDP4HYI customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.hSkGQUlC1f + rm /tmp/tmp.kfTHDP4HYI /tmp/tmp.hSkGQUlC1f + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + sed -e 's^namespace: .*^namespace: psmdb-operator^' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/deploy/cw-rbac.yaml + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.pUDSjjg6D8 ++ mktemp + local LAST_ERR=/tmp/tmp.Gr30Ik4QF0 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.pUDSjjg6D8 clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.Gr30Ik4QF0 + rm /tmp/tmp.pUDSjjg6D8 /tmp/tmp.Gr30Ik4QF0 + return 0 + kubectl_bin apply -f - + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1872-66b64516") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/deploy/cw-operator.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.vtIw8UK9ey ++ mktemp + local LAST_ERR=/tmp/tmp.4wHULHlRh7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vtIw8UK9ey deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.4wHULHlRh7 + rm /tmp/tmp.vtIw8UK9ey /tmp/tmp.4wHULHlRh7 + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.EQNwxDL7rT +++ mktemp ++ local LAST_ERR=/tmp/tmp.pu8re0X5Vz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EQNwxDL7rT ++ cat /tmp/tmp.pu8re0X5Vz ++ rm /tmp/tmp.EQNwxDL7rT /tmp/tmp.pu8re0X5Vz ++ return 0 + wait_pod percona-server-mongodb-operator-559546695f-wzw7n + local pod=percona-server-mongodb-operator-559546695f-wzw7n + set +o xtrace waiting for pod/percona-server-mongodb-operator-559546695f-wzw7n to be ready.OK + create_namespace pitr-physical-12141 + local namespace=pitr-physical-12141 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ helm list --all-namespaces --filter chaos-mesh ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ grep chaos-mesh ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep validate-auth ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl api-resources ++ grep chaos-mesh ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get clusterrole + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces pitr-physical-12141' + set +o xtrace 
----------------------------------------------------------------------------------- cleaned up old namespaces pitr-physical-12141 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pitr-physical-12141 --ignore-not-found + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + awk '{print$1}' ++ mktemp + xargs kubectl delete ns + kubectl_bin get ns + local LAST_OUT=/tmp/tmp.cLFRdqoN7h ++ mktemp + local LAST_OUT=/tmp/tmp.ZzhLANRw78 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.5pK5oW6P1O + local exit_status=0 + local timeout=4 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.sR8yyALZe0 + local exit_status=0 + local timeout=4 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pitr-physical-12141 --ignore-not-found ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ZzhLANRw78 + cat /tmp/tmp.sR8yyALZe0 + rm /tmp/tmp.ZzhLANRw78 /tmp/tmp.sR8yyALZe0 + return 0 namespace "gke-managed-cim" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.cLFRdqoN7h + cat /tmp/tmp.5pK5oW6P1O + rm /tmp/tmp.cLFRdqoN7h /tmp/tmp.5pK5oW6P1O + return 0 + kubectl_bin wait --for=delete namespace pitr-physical-12141 ++ mktemp + local LAST_OUT=/tmp/tmp.RrDlaXpX2e ++ mktemp + local LAST_ERR=/tmp/tmp.1m3BX2Dwvx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace pitr-physical-12141 namespace "gke-managed-system" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RrDlaXpX2e + cat /tmp/tmp.1m3BX2Dwvx + rm /tmp/tmp.RrDlaXpX2e /tmp/tmp.1m3BX2Dwvx + return 0 + desc 'create namespace pitr-physical-12141' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pitr-physical-12141 ----------------------------------------------------------------------------------- + kubectl_bin create namespace pitr-physical-12141 ++ mktemp + local LAST_OUT=/tmp/tmp.UmETcuhO4C ++ mktemp + local LAST_ERR=/tmp/tmp.f9YTNmVUlM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pitr-physical-12141 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UmETcuhO4C namespace/pitr-physical-12141 created + cat /tmp/tmp.f9YTNmVUlM + rm /tmp/tmp.UmETcuhO4C /tmp/tmp.f9YTNmVUlM + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZHjNjZXGFE +++ mktemp ++ local LAST_ERR=/tmp/tmp.pPojncEF7a ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZHjNjZXGFE ++ cat /tmp/tmp.pPojncEF7a ++ rm /tmp/tmp.ZHjNjZXGFE /tmp/tmp.pPojncEF7a ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1872-66b64516-1-cluster6 --namespace=pitr-physical-12141 ++ mktemp + local LAST_OUT=/tmp/tmp.ZW1hOUKEVO ++ mktemp + local LAST_ERR=/tmp/tmp.er5SgqErRk + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1872-66b64516-1-cluster6 --namespace=pitr-physical-12141 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ZW1hOUKEVO Context 
"gke_cloud-dev-112233_us-central1-a_jen-psmdb-1872-66b64516-1-cluster6" modified. + cat /tmp/tmp.er5SgqErRk + rm /tmp/tmp.ZW1hOUKEVO /tmp/tmp.er5SgqErRk + return 0 + deploy_minio + desc 'install Minio' + set +o xtrace ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- + helm uninstall minio-service Error: uninstall: Release not loaded: minio-service: release: not found + : + helm repo remove minio "minio" has been removed from your repositories + helm repo add minio https://charts.min.io/ "minio" has been added to your repositories + retry 10 60 helm install minio-service --version 5.0.14 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio + local max=10 + local delay=60 + shift 2 + local n=1 + helm install minio-service --version 5.0.14 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio NAME: minio-service LAST DEPLOYED: Mon Mar 24 03:28:54 2025 NAMESPACE: pitr-physical-12141 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.pitr-physical-12141.svc.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace pitr-physical-12141 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. kubectl port-forward $POD_NAME 9000 --namespace pitr-physical-12141 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace pitr-physical-12141 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace pitr-physical-12141 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. 
mc ls minio-service-local ++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rri6sLcY1y +++ mktemp ++ local LAST_ERR=/tmp/tmp.SrrQLDJi02 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rri6sLcY1y ++ cat /tmp/tmp.SrrQLDJi02 ++ rm /tmp/tmp.rri6sLcY1y /tmp/tmp.SrrQLDJi02 ++ return 0 + MINIO_POD=minio-service-847fc8bb8d-5rrlx + wait_pod minio-service-847fc8bb8d-5rrlx + local pod=minio-service-847fc8bb8d-5rrlx + set +o xtrace waiting for pod/minio-service-847fc8bb8d-5rrlx to be ready.OK + '[' -n psmdb-operator ']' + kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.pitr-physical-12141.svc.cluster.local --tcp=9000 ++ mktemp + local LAST_OUT=/tmp/tmp.qzZmMgdkuF ++ mktemp + local LAST_ERR=/tmp/tmp.5veYB6mxHC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create svc -n psmdb-operator externalname minio-service --external-name=minio-service.pitr-physical-12141.svc.cluster.local --tcp=9000 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qzZmMgdkuF service/minio-service created + cat /tmp/tmp.5veYB6mxHC + rm /tmp/tmp.qzZmMgdkuF /tmp/tmp.5veYB6mxHC + return 0 + create_minio_bucket operator-testing + local bucket=operator-testing + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' ++ mktemp + local LAST_OUT=/tmp/tmp.aw57ktFYbJ ++ mktemp + local LAST_ERR=/tmp/tmp.oldmeSM8Qb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.aw57ktFYbJ make_bucket: operator-testing pod "aws-cli" deleted + cat /tmp/tmp.oldmeSM8Qb If you don't see a command prompt, try pressing enter. 
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: unable to upgrade connection: container aws-cli not found in pod aws-cli_pitr-physical-12141 + rm /tmp/tmp.aw57ktFYbJ /tmp/tmp.oldmeSM8Qb + return 0 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/conf/minio-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.hHQa36LZFJ ++ mktemp + local LAST_ERR=/tmp/tmp.ObdRk8xONu + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/conf/minio-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hHQa36LZFJ secret/some-users created deployment.apps/psmdb-client created secret/minio-secret created + cat /tmp/tmp.ObdRk8xONu + rm /tmp/tmp.hHQa36LZFJ /tmp/tmp.ObdRk8xONu + return 0 + cluster=some-name + desc 'create first PSMDB cluster some-name' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster some-name ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/pitr-physical/conf/some-name-rs0.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/pitr-physical/conf/some-name-rs0.yml ++ mktemp + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod8.0"' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/pitr-physical/conf/some-name-rs0.yml + local LAST_OUT=/tmp/tmp.r7K4T0SUFG + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1872-66b64516"' + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' ++ mktemp + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_ERR=/tmp/tmp.cErlm3a0tg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.r7K4T0SUFG perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.cErlm3a0tg + rm /tmp/tmp.r7K4T0SUFG /tmp/tmp.cErlm3a0tg + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod 
some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready......OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.....OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zsu2fP8JFP +++ mktemp ++ local LAST_ERR=/tmp/tmp.JTQ33ZEkXn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zsu2fP8JFP ++ cat /tmp/tmp.JTQ33ZEkXn ++ rm /tmp/tmp.zsu2fP8JFP /tmp/tmp.JTQ33ZEkXn ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.....OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cSSDW3Su57 +++ mktemp ++ local LAST_ERR=/tmp/tmp.2dux2KTRRx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cSSDW3Su57 ++ cat /tmp/tmp.2dux2KTRRx ++ rm /tmp/tmp.cSSDW3Su57 /tmp/tmp.2dux2KTRRx ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness........................................ + wait_for_running some-name-cfg 3 false + local name=some-name-cfg + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-cfg-0 + local pod=some-name-cfg-0 + set +o xtrace waiting for pod/some-name-cfg-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-cfg-1 + local pod=some-name-cfg-1 + set +o xtrace waiting for pod/some-name-cfg-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YejJ43EunQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.bd2kF4PdFu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YejJ43EunQ ++ cat /tmp/tmp.bd2kF4PdFu ++ rm /tmp/tmp.YejJ43EunQ /tmp/tmp.bd2kF4PdFu ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-cfg-2 + local pod=some-name-cfg-2 + set +o xtrace waiting for pod/some-name-cfg-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.O5Wdv0WAlt +++ mktemp ++ local LAST_ERR=/tmp/tmp.giDr5WTrop ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.O5Wdv0WAlt ++ cat /tmp/tmp.giDr5WTrop ++ 
rm /tmp/tmp.O5Wdv0WAlt /tmp/tmp.giDr5WTrop ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ false == \t\r\u\e ]] + sleep 10 + write_initial_data + desc 'create user myApp' + set +o xtrace ----------------------------------------------------------------------------------- create user myApp ----------------------------------------------------------------------------------- + run_mongos 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-mongos.pitr-physical-12141 + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@some-name-mongos.pitr-physical-12141 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ awk -F: '{print $2}' ++ echo .svc.cluster.local + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zu24gLUFIK +++ mktemp ++ local LAST_ERR=/tmp/tmp.4JJlek37YV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zu24gLUFIK ++ cat /tmp/tmp.4JJlek37YV ++ rm /tmp/tmp.zu24gLUFIK /tmp/tmp.4JJlek37YV ++ return 0 + local client_container=psmdb-client-874f474b6-bpwdk + kubectl_bin exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.VYeh51Zui7 ++ mktemp + local LAST_ERR=/tmp/tmp.YSlIIs0VO2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VYeh51Zui7 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("34a7e9f7-c772-463f-8f89-60b338696ce7") } Percona Server for MongoDB server version: v8.0.4-2 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.YSlIIs0VO2 + rm /tmp/tmp.VYeh51Zui7 /tmp/tmp.YSlIIs0VO2 + return 0 + sleep 2 + write_document + local cmp_postfix= + desc 'write initial data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write initial data, read from all ----------------------------------------------------------------------------------- + run_mongos 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-mongos.pitr-physical-12141 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-mongos.pitr-physical-12141 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local 
mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UtMpohG1Th +++ mktemp ++ local LAST_ERR=/tmp/tmp.wpP8eg4z6Y ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UtMpohG1Th ++ cat /tmp/tmp.wpP8eg4z6Y ++ rm /tmp/tmp.UtMpohG1Th /tmp/tmp.wpP8eg4z6Y ++ return 0 + local client_container=psmdb-client-874f474b6-bpwdk + kubectl_bin exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.9Pvue4ZQAM ++ mktemp + local LAST_ERR=/tmp/tmp.na1owJjNJY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9Pvue4ZQAM Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("084aaec4-6ee6-4952-aaa8-c41c80c531ea") } Percona Server for MongoDB server version: v8.0.4-2 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.na1owJjNJY + rm /tmp/tmp.9Pvue4ZQAM /tmp/tmp.na1owJjNJY + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongos_cmd find myApp:myPass@some-name-mongos.pitr-physical-12141 + local command=find + local uri=myApp:myPass@some-name-mongos.pitr-physical-12141 + local postfix= + local suffix= + local database=myApp + local collection=test + local port=27017 + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.pitr-physical-12141 mongodb '' '' 27017 + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.pitr-physical-12141 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + suffix_port= + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6s8e0lNQ9t +++ mktemp ++ local LAST_ERR=/tmp/tmp.q1p8sK0n7z ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6s8e0lNQ9t ++ cat /tmp/tmp.q1p8sK0n7z ++ rm /tmp/tmp.6s8e0lNQ9t /tmp/tmp.q1p8sK0n7z ++ return 0 + local 
client_container=psmdb-client-874f474b6-bpwdk + kubectl_bin exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.9SGogBX3EN ++ mktemp + local LAST_ERR=/tmp/tmp.JaFk33rE6e + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9SGogBX3EN + cat /tmp/tmp.JaFk33rE6e + rm /tmp/tmp.9SGogBX3EN /tmp/tmp.JaFk33rE6e + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/pitr-physical/compare/find.json /tmp/tmp.BqjbN3Laow/find + wait_backup_agent some-name-rs0-0 + local agent_pod=some-name-rs0-0 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-0...2025-03-24T03:32:44.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-1 + local agent_pod=some-name-rs0-1 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-1...2025-03-24T03:32:45.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-2 + local agent_pod=some-name-rs0-2 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-2...2025-03-24T03:32:43.000+0000 I listening for the commands + wait_backup_agent some-name-rs1-0 + local agent_pod=some-name-rs1-0 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs1-0...2025-03-24T03:32:47.000+0000 I listening for the commands + wait_backup_agent some-name-rs1-1 + local agent_pod=some-name-rs1-1 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs1-1...2025-03-24T03:32:45.000+0000 I listening for the commands + wait_backup_agent some-name-rs1-2 + local agent_pod=some-name-rs1-2 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs1-2...2025-03-24T03:32:44.000+0000 I listening for the commands + wait_backup_agent some-name-rs2-0 + local agent_pod=some-name-rs2-0 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs2-0...2025-03-24T03:32:45.000+0000 I listening for the commands + wait_backup_agent some-name-rs2-1 + local agent_pod=some-name-rs2-1 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs2-1...2025-03-24T03:32:47.000+0000 I listening for the commands + wait_backup_agent some-name-rs2-2 + local agent_pod=some-name-rs2-2 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs2-2...2025-03-24T03:32:44.000+0000 I listening for the commands + echo 'Sleeping for 360 seconds' Sleeping for 360 seconds + sleep 360 + backup_name_minio=backup-minio + run_backup backup-minio 2 physical + local name=backup-minio + local idx=2 + local type=physical + desc 'run backup backup-minio-2' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio-2 ----------------------------------------------------------------------------------- + /usr/bin/sed -e 's/name:/name: backup-minio-2/' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/pitr-physical/conf/backup-minio.yml + kubectl_bin apply -f - + /usr/bin/sed -e 's/type:/type: physical/' ++ mktemp + local LAST_OUT=/tmp/tmp.oIGuLjN5ew ++ mktemp + local LAST_ERR=/tmp/tmp.YoJojAuw0r + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + 
set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oIGuLjN5ew perconaservermongodbbackup.psmdb.percona.com/backup-minio-2 created + cat /tmp/tmp.YoJojAuw0r + rm /tmp/tmp.oIGuLjN5ew /tmp/tmp.YoJojAuw0r + return 0 + wait_backup backup-minio-2 + local backup_name=backup-minio-2 + local target_state=ready + set +o xtrace waiting for backup-minio-2 to reach ready state....... + sleep 5 + write_document -2nd + local cmp_postfix=-2nd + desc 'write initial data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write initial data, read from all ----------------------------------------------------------------------------------- + run_mongos 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-mongos.pitr-physical-12141 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-mongos.pitr-physical-12141 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fTnIaEzCaN +++ mktemp ++ local LAST_ERR=/tmp/tmp.dpi8mdj30d ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fTnIaEzCaN ++ cat /tmp/tmp.dpi8mdj30d ++ rm /tmp/tmp.fTnIaEzCaN /tmp/tmp.dpi8mdj30d ++ return 0 + local client_container=psmdb-client-874f474b6-bpwdk + kubectl_bin exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.Q68Zo46adW ++ mktemp + local LAST_ERR=/tmp/tmp.5PkFAiXIFD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Q68Zo46adW Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("dcc57187-e514-41aa-975e-4896f0d2eb9f") } Percona Server for MongoDB server version: v8.0.4-2 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.5PkFAiXIFD + rm /tmp/tmp.Q68Zo46adW /tmp/tmp.5PkFAiXIFD + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongos_cmd find myApp:myPass@some-name-mongos.pitr-physical-12141 -2nd + local command=find + local uri=myApp:myPass@some-name-mongos.pitr-physical-12141 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local port=27017 + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.pitr-physical-12141 mongodb '' '' 27017 + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to 
reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.pitr-physical-12141 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nTkSB8cKQa +++ mktemp ++ local LAST_ERR=/tmp/tmp.QusbhxVw0s ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nTkSB8cKQa ++ cat /tmp/tmp.QusbhxVw0s ++ rm /tmp/tmp.nTkSB8cKQa /tmp/tmp.QusbhxVw0s ++ return 0 + local client_container=psmdb-client-874f474b6-bpwdk + kubectl_bin exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.UCgxphd3aF ++ mktemp + local LAST_ERR=/tmp/tmp.jLrMNeYlNv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UCgxphd3aF + cat /tmp/tmp.jLrMNeYlNv + rm /tmp/tmp.UCgxphd3aF /tmp/tmp.jLrMNeYlNv + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/pitr-physical/compare/find-2nd.json /tmp/tmp.BqjbN3Laow/find-2nd ++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json ++ jq '.backups.snapshot[0].restoreTo' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0hyFdm2t8L +++ mktemp ++ local LAST_ERR=/tmp/tmp.ssKhLDdu73 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0hyFdm2t8L ++ cat /tmp/tmp.ssKhLDdu73 ++ rm /tmp/tmp.0hyFdm2t8L /tmp/tmp.ssKhLDdu73 ++ return 0 + backup_last_write=1742787584 ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.KDA3ZzIJzQ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.YDsXFaJ5LH +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.KDA3ZzIJzQ +++ cat /tmp/tmp.YDsXFaJ5LH +++ rm /tmp/tmp.KDA3ZzIJzQ /tmp/tmp.YDsXFaJ5LH +++ return 0 ++ echo null + last_chunk=null + retries=0 + [[ null -gt 1742787584 ]] + [[ 0 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json ++++ mktemp +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' +++ local LAST_OUT=/tmp/tmp.wxjJXcK1AH 
++++ mktemp +++ local LAST_ERR=/tmp/tmp.VWzbAnKQob +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.wxjJXcK1AH +++ cat /tmp/tmp.VWzbAnKQob +++ rm /tmp/tmp.wxjJXcK1AH /tmp/tmp.VWzbAnKQob +++ return 0 ++ echo null + last_chunk=null + retries=1 ++ format_date null ++ local timestamp=null +++ TZ=UTC +++ /usr/bin/date -d@null '+%Y-%m-%d %H:%M:%S' /usr/bin/date: invalid date ‘@null’ ++ echo ++ format_date 1742787584 ++ local timestamp=1742787584 +++ TZ=UTC +++ /usr/bin/date -d@1742787584 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:39:44 + echo 'Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44)' Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44) + sleep 10 + [[ null -gt 1742787584 ]] + [[ 1 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.jFbIO94j0u ++++ mktemp +++ local LAST_ERR=/tmp/tmp.ZBUg5RPby0 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.jFbIO94j0u +++ cat /tmp/tmp.ZBUg5RPby0 +++ rm /tmp/tmp.jFbIO94j0u /tmp/tmp.ZBUg5RPby0 +++ return 0 ++ echo null + last_chunk=null + retries=2 ++ format_date null ++ local timestamp=null +++ TZ=UTC +++ /usr/bin/date -d@null '+%Y-%m-%d %H:%M:%S' /usr/bin/date: invalid date ‘@null’ ++ echo ++ format_date 1742787584 ++ local timestamp=1742787584 +++ TZ=UTC +++ /usr/bin/date -d@1742787584 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:39:44 + echo 'Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44)' Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44) + sleep 10 + [[ null -gt 1742787584 ]] + [[ 2 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.sUiYGNCbeP ++++ mktemp +++ local LAST_ERR=/tmp/tmp.aSHuUSTRbx +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.sUiYGNCbeP +++ cat /tmp/tmp.aSHuUSTRbx +++ rm /tmp/tmp.sUiYGNCbeP /tmp/tmp.aSHuUSTRbx +++ return 0 ++ echo null + last_chunk=null + retries=3 ++ format_date null ++ local timestamp=null +++ TZ=UTC +++ /usr/bin/date -d@null '+%Y-%m-%d %H:%M:%S' /usr/bin/date: invalid date ‘@null’ ++ echo ++ format_date 1742787584 ++ local timestamp=1742787584 +++ TZ=UTC +++ /usr/bin/date -d@1742787584 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:39:44 + echo 'Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44)' Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44) + sleep 10 + [[ null -gt 1742787584 ]] + [[ 3 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json 
+++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.CRxwUVuMOv ++++ mktemp +++ local LAST_ERR=/tmp/tmp.B80XBPCRON +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.CRxwUVuMOv +++ cat /tmp/tmp.B80XBPCRON +++ rm /tmp/tmp.CRxwUVuMOv /tmp/tmp.B80XBPCRON +++ return 0 ++ echo null + last_chunk=null + retries=4 ++ format_date null ++ local timestamp=null +++ TZ=UTC +++ /usr/bin/date -d@null '+%Y-%m-%d %H:%M:%S' /usr/bin/date: invalid date ‘@null’ ++ echo ++ format_date 1742787584 ++ local timestamp=1742787584 +++ TZ=UTC +++ /usr/bin/date -d@1742787584 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:39:44 + echo 'Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44)' Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44) + sleep 10 + [[ null -gt 1742787584 ]] + [[ 4 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.dGWJScplO9 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.BjScqBhjIi +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.dGWJScplO9 +++ cat /tmp/tmp.BjScqBhjIi +++ rm /tmp/tmp.dGWJScplO9 /tmp/tmp.BjScqBhjIi +++ return 0 ++ echo null + last_chunk=null + retries=5 ++ format_date null ++ local timestamp=null +++ TZ=UTC +++ /usr/bin/date -d@null '+%Y-%m-%d %H:%M:%S' /usr/bin/date: invalid date ‘@null’ ++ echo ++ format_date 1742787584 ++ local timestamp=1742787584 +++ TZ=UTC +++ /usr/bin/date -d@1742787584 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:39:44 + echo 'Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44)' Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44) + sleep 10 + [[ null -gt 1742787584 ]] + [[ 5 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.UKPseGNehO ++++ mktemp +++ local LAST_ERR=/tmp/tmp.46XQzhEOQh +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.UKPseGNehO +++ cat /tmp/tmp.46XQzhEOQh +++ rm /tmp/tmp.UKPseGNehO /tmp/tmp.46XQzhEOQh +++ return 0 ++ echo null + last_chunk=null + retries=6 ++ format_date null ++ local timestamp=null +++ TZ=UTC +++ /usr/bin/date -d@null '+%Y-%m-%d %H:%M:%S' /usr/bin/date: invalid date ‘@null’ ++ echo ++ format_date 1742787584 ++ local timestamp=1742787584 +++ TZ=UTC +++ /usr/bin/date -d@1742787584 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:39:44 + echo 'Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44)' Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44) + sleep 10 + [[ null -gt 1742787584 ]] + [[ 6 -gt 30 ]] ++ get_latest_oplog_chunk_ts 
some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.nUPl2vxuJN ++++ mktemp +++ local LAST_ERR=/tmp/tmp.PLz5D35LUL +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.nUPl2vxuJN +++ cat /tmp/tmp.PLz5D35LUL +++ rm /tmp/tmp.nUPl2vxuJN /tmp/tmp.PLz5D35LUL +++ return 0 ++ echo null + last_chunk=null + retries=7 ++ format_date null ++ local timestamp=null +++ TZ=UTC +++ /usr/bin/date -d@null '+%Y-%m-%d %H:%M:%S' /usr/bin/date: invalid date ‘@null’ ++ echo ++ format_date 1742787584 ++ local timestamp=1742787584 +++ TZ=UTC +++ /usr/bin/date -d@1742787584 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:39:44 + echo 'Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44)' Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44) + sleep 10 + [[ null -gt 1742787584 ]] + [[ 7 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.fKC8bqdGuD ++++ mktemp +++ local LAST_ERR=/tmp/tmp.yry9euzfv3 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.fKC8bqdGuD +++ cat /tmp/tmp.yry9euzfv3 +++ rm /tmp/tmp.fKC8bqdGuD /tmp/tmp.yry9euzfv3 +++ return 0 ++ echo null + last_chunk=null + retries=8 ++ format_date null ++ local timestamp=null +++ TZ=UTC +++ /usr/bin/date -d@null '+%Y-%m-%d %H:%M:%S' /usr/bin/date: invalid date ‘@null’ ++ echo ++ format_date 1742787584 ++ local timestamp=1742787584 +++ TZ=UTC +++ /usr/bin/date -d@1742787584 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:39:44 + echo 'Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44)' Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44) + sleep 10 + [[ null -gt 1742787584 ]] + [[ 8 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.2fhMSlRJPZ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.8GKDfYj87P +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.2fhMSlRJPZ +++ cat /tmp/tmp.8GKDfYj87P +++ rm /tmp/tmp.2fhMSlRJPZ /tmp/tmp.8GKDfYj87P +++ return 0 ++ echo null + last_chunk=null + retries=9 ++ format_date null ++ local timestamp=null +++ TZ=UTC +++ /usr/bin/date -d@null '+%Y-%m-%d %H:%M:%S' /usr/bin/date: invalid date ‘@null’ ++ echo ++ format_date 1742787584 ++ local timestamp=1742787584 +++ TZ=UTC +++ /usr/bin/date -d@1742787584 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:39:44 + echo 'Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44)' Waiting for last oplog chunk () to be greater than last 
write (2025-03-24 03:39:44) + sleep 10 + [[ null -gt 1742787584 ]] + [[ 9 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.tGm21RTqkW ++++ mktemp +++ local LAST_ERR=/tmp/tmp.mmeNiaH8bR +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.tGm21RTqkW +++ cat /tmp/tmp.mmeNiaH8bR +++ rm /tmp/tmp.tGm21RTqkW /tmp/tmp.mmeNiaH8bR +++ return 0 ++ echo null + last_chunk=null + retries=10 ++ format_date null ++ local timestamp=null +++ TZ=UTC +++ /usr/bin/date -d@null '+%Y-%m-%d %H:%M:%S' /usr/bin/date: invalid date ‘@null’ ++ echo ++ format_date 1742787584 ++ local timestamp=1742787584 +++ TZ=UTC +++ /usr/bin/date -d@1742787584 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:39:44 + echo 'Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44)' Waiting for last oplog chunk () to be greater than last write (2025-03-24 03:39:44) + sleep 10 + [[ null -gt 1742787584 ]] + [[ 10 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.GCuEobM7Zu ++++ mktemp +++ local LAST_ERR=/tmp/tmp.dPDzHfxkMD +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.GCuEobM7Zu +++ cat /tmp/tmp.dPDzHfxkMD +++ rm /tmp/tmp.GCuEobM7Zu /tmp/tmp.dPDzHfxkMD +++ return 0 ++ echo 1742787737 + last_chunk=1742787737 + retries=11 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 ++ format_date 1742787584 ++ local timestamp=1742787584 +++ TZ=UTC +++ /usr/bin/date -d@1742787584 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:39:44 + echo 'Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than last write (2025-03-24 03:39:44)' Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than last write (2025-03-24 03:39:44) + sleep 10 + [[ 1742787737 -gt 1742787584 ]] + check_recovery backup-minio-2 date 1742787737 -2nd some-name + local backup_name=backup-minio-2 + local restore_type=date + local restore_date=1742787737 + local cmp_postfix=-2nd + local cluster_name=some-name ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.irxx3DIXR0 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.LLn78gfv8e +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.irxx3DIXR0 +++ cat /tmp/tmp.LLn78gfv8e +++ rm /tmp/tmp.irxx3DIXR0 /tmp/tmp.LLn78gfv8e +++ return 0 ++ echo 1742787737 + local latest_ts=1742787737 + desc 'write more data before restore by date' + set +o xtrace 
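The dozen near-identical iterations above come from the test's PITR polling helpers: it repeatedly asks PBM for the newest oplog chunk and compares its end timestamp against the last write. A minimal sketch of that logic, reconstructed from the trace (helper names and the retry bookkeeping are taken from the log; the real functions in e2e-tests/functions may differ in detail):

get_latest_oplog_chunk_ts() {
    local cluster=$1
    # Ask the backup agent for PBM status and take the end of the newest PITR chunk.
    kubectl exec "${cluster}-rs0-0" -c backup-agent -- pbm status -o json \
        | jq '.backups.pitrChunks.pitrChunks | last | .range.end'
}

format_date() {
    # Render a unix timestamp as UTC; prints "invalid date" for "null", exactly as seen above.
    TZ=UTC date -d "@$1" '+%Y-%m-%d %H:%M:%S'
}

wait_for_chunk_after() {
    local cluster=$1 last_write=$2 retries=0 last_chunk=null
    # PBM reports "null" until the first chunk after the physical backup is uploaded.
    until [[ ${last_chunk} != null && ${last_chunk} -gt ${last_write} ]]; do
        [[ ${retries} -gt 30 ]] && { echo "no oplog chunk newer than $(format_date "${last_write}")" >&2; return 1; }
        last_chunk=$(get_latest_oplog_chunk_ts "${cluster}")
        echo "Waiting for last oplog chunk ($(format_date "${last_chunk}")) to be greater than last write ($(format_date "${last_write}"))"
        sleep 10
        retries=$((retries + 1))
    done
}

Once the chunk end (1742787737, i.e. 2025-03-24 03:42:17) passes the last write, the test proceeds to check_recovery backup-minio-2 date 1742787737.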
----------------------------------------------------------------------------------- write more data before restore by date ----------------------------------------------------------------------------------- + run_mongos 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-mongos.pitr-physical-12141 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-mongos.pitr-physical-12141 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OeJ9KuzvTx +++ mktemp ++ local LAST_ERR=/tmp/tmp.etnyYgaoyX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.OeJ9KuzvTx ++ cat /tmp/tmp.etnyYgaoyX ++ rm /tmp/tmp.OeJ9KuzvTx /tmp/tmp.etnyYgaoyX ++ return 0 + local client_container=psmdb-client-874f474b6-bpwdk + kubectl_bin exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.HYn9P3RIW6 ++ mktemp + local LAST_ERR=/tmp/tmp.TmJeh7Th6t + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.HYn9P3RIW6 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("9a49e9d8-f153-4a63-8ade-2e014038aa55") } Percona Server for MongoDB server version: v8.0.4-2 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.TmJeh7Th6t + rm /tmp/tmp.HYn9P3RIW6 /tmp/tmp.TmJeh7Th6t + return 0 + [[ -n 1742787737 ]] ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 + desc 'Restoring to time 2025-03-24 03:42:17' + set +o xtrace ----------------------------------------------------------------------------------- Restoring to time 2025-03-24 03:42:17 ----------------------------------------------------------------------------------- + retries=0 + [[ 1742787737 -gt 1742787737 ]] + [[ 0 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.58yMmPSAGo ++++ mktemp +++ local LAST_ERR=/tmp/tmp.mBEWJpAody +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.58yMmPSAGo +++ cat 
/tmp/tmp.mBEWJpAody +++ rm /tmp/tmp.58yMmPSAGo /tmp/tmp.mBEWJpAody +++ return 0 ++ echo 1742787737 + latest_ts=1742787737 + retries=1 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 + echo 'Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17)' Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17) + sleep 10 + [[ 1742787737 -gt 1742787737 ]] + [[ 1 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.K8pjvnlCUH ++++ mktemp +++ local LAST_ERR=/tmp/tmp.3Y1g7x8TBY +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.K8pjvnlCUH +++ cat /tmp/tmp.3Y1g7x8TBY +++ rm /tmp/tmp.K8pjvnlCUH /tmp/tmp.3Y1g7x8TBY +++ return 0 ++ echo 1742787737 + latest_ts=1742787737 + retries=2 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 + echo 'Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17)' Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17) + sleep 10 + [[ 1742787737 -gt 1742787737 ]] + [[ 2 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.zdSVKFKiaH ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Mc1P4sY3I3 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.zdSVKFKiaH +++ cat /tmp/tmp.Mc1P4sY3I3 +++ rm /tmp/tmp.zdSVKFKiaH /tmp/tmp.Mc1P4sY3I3 +++ return 0 ++ echo 1742787737 + latest_ts=1742787737 + retries=3 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 + echo 'Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17)' Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17) + sleep 10 + [[ 1742787737 -gt 1742787737 ]] + [[ 3 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.edHxa8oRfI ++++ mktemp 
+++ local LAST_ERR=/tmp/tmp.StJzjd7Ss7 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.edHxa8oRfI +++ cat /tmp/tmp.StJzjd7Ss7 +++ rm /tmp/tmp.edHxa8oRfI /tmp/tmp.StJzjd7Ss7 +++ return 0 ++ echo 1742787737 + latest_ts=1742787737 + retries=4 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 + echo 'Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17)' Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17) + sleep 10 + [[ 1742787737 -gt 1742787737 ]] + [[ 4 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json ++++ mktemp +++ local LAST_OUT=/tmp/tmp.DeUJXEbhaV ++++ mktemp +++ local LAST_ERR=/tmp/tmp.WqaIRyYFs4 +++ local exit_status=0 +++ local timeout=4 +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.DeUJXEbhaV +++ cat /tmp/tmp.WqaIRyYFs4 +++ rm /tmp/tmp.DeUJXEbhaV /tmp/tmp.WqaIRyYFs4 +++ return 0 ++ echo 1742787737 + latest_ts=1742787737 + retries=5 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 + echo 'Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17)' Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17) + sleep 10 + [[ 1742787737 -gt 1742787737 ]] + [[ 5 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.HEJ5t1qQlT ++++ mktemp +++ local LAST_ERR=/tmp/tmp.JxVpFokmXJ +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.HEJ5t1qQlT +++ cat /tmp/tmp.JxVpFokmXJ +++ rm /tmp/tmp.HEJ5t1qQlT /tmp/tmp.JxVpFokmXJ +++ return 0 ++ echo 1742787737 + latest_ts=1742787737 + retries=6 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 + echo 'Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17)' Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17) + sleep 10 + [[ 
1742787737 -gt 1742787737 ]] + [[ 6 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.a37Ua7P0iq ++++ mktemp +++ local LAST_ERR=/tmp/tmp.PHaaxDa0Vp +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.a37Ua7P0iq +++ cat /tmp/tmp.PHaaxDa0Vp +++ rm /tmp/tmp.a37Ua7P0iq /tmp/tmp.PHaaxDa0Vp +++ return 0 ++ echo 1742787737 + latest_ts=1742787737 + retries=7 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 + echo 'Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17)' Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17) + sleep 10 + [[ 1742787737 -gt 1742787737 ]] + [[ 7 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json ++++ mktemp +++ local LAST_OUT=/tmp/tmp.fBtLrvgaGL ++++ mktemp +++ local LAST_ERR=/tmp/tmp.lM0rLIILo1 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.fBtLrvgaGL +++ cat /tmp/tmp.lM0rLIILo1 +++ rm /tmp/tmp.fBtLrvgaGL /tmp/tmp.lM0rLIILo1 +++ return 0 ++ echo 1742787737 + latest_ts=1742787737 + retries=8 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 + echo 'Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17)' Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17) + sleep 10 + [[ 1742787737 -gt 1742787737 ]] + [[ 8 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.t3dvg61XAB ++++ mktemp +++ local LAST_ERR=/tmp/tmp.pqma4dA243 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.t3dvg61XAB +++ cat /tmp/tmp.pqma4dA243 +++ rm /tmp/tmp.t3dvg61XAB /tmp/tmp.pqma4dA243 +++ return 0 ++ echo 1742787737 + latest_ts=1742787737 + retries=9 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ 
/usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 + echo 'Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17)' Waiting for last oplog chunk (2025-03-24 03:42:17) to be greater than restore target (2025-03-24 03:42:17) + sleep 10 + [[ 1742787737 -gt 1742787737 ]] + [[ 9 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.ptSsd0a2Qy ++++ mktemp +++ local LAST_ERR=/tmp/tmp.zl1Te70Z5A +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.ptSsd0a2Qy +++ cat /tmp/tmp.zl1Te70Z5A +++ rm /tmp/tmp.ptSsd0a2Qy /tmp/tmp.zl1Te70Z5A +++ return 0 ++ echo 1742787860 + latest_ts=1742787860 + retries=10 ++ format_date 1742787860 ++ local timestamp=1742787860 +++ TZ=UTC +++ /usr/bin/date -d@1742787860 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:20 ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 + echo 'Waiting for last oplog chunk (2025-03-24 03:44:20) to be greater than restore target (2025-03-24 03:42:17)' Waiting for last oplog chunk (2025-03-24 03:44:20) to be greater than restore target (2025-03-24 03:42:17) + sleep 10 + [[ 1742787860 -gt 1742787737 ]] + desc 'check restore by date' + set +o xtrace ----------------------------------------------------------------------------------- check restore by date ----------------------------------------------------------------------------------- + '[' -z 1742787737 ']' + /usr/bin/sed -e 's/type:/type: date/' + kubectl_bin apply -f - + /usr/bin/sed -e 's/backupName:/backupName: backup-minio-2/' + /usr/bin/sed -e 's/name:/name: restore-backup-minio-2/' ++ mktemp + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/pitr-physical/conf/restore.yml + local LAST_OUT=/tmp/tmp.HL6rf22eTM ++ format_date 1742787737 ++ local timestamp=1742787737 +++ TZ=UTC ++ mktemp + local LAST_ERR=/tmp/tmp.8w260PH9TR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - +++ /usr/bin/date -d@1742787737 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:42:17 + /usr/bin/sed -e 's/date:/date: 2025-03-24 03:42:17/' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.HL6rf22eTM perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-2 created + cat /tmp/tmp.8w260PH9TR + rm /tmp/tmp.HL6rf22eTM /tmp/tmp.8w260PH9TR + return 0 + wait_restore backup-minio-2 some-name requested 0 900 + local backup_name=backup-minio-2 + local cluster_name=some-name + local target_state=requested + local wait_cluster_consistency=0 + local wait_time=900 + local ok_if_ready=0 + set +o xtrace waiting psmdb-restore/restore-backup-minio-2 to reach requested 
state.........................................................................................................................................................................................................................................................................................................................................................................................................................................................................................OK + '[' 0 -eq 1 ']' + echo + wait_restore backup-minio-2 some-name ready 0 1600 + local backup_name=backup-minio-2 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=0 + local wait_time=1600 + local ok_if_ready=0 + set +o xtrace waiting psmdb-restore/restore-backup-minio-2 to reach ready state.........................................................................................OK + '[' 0 -eq 1 ']' + echo + set -o xtrace + wait_for_running some-name-mongos 3 + local name=some-name-mongos + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=mongos + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-mongos-0 + local pod=some-name-mongos-0 + set +o xtrace waiting for pod/some-name-mongos-0 to be ready..................OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-mongos-1 + local pod=some-name-mongos-1 + set +o xtrace waiting for pod/some-name-mongos-1 to be ready.....OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WhL5PbOMg4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PVz48YcUkv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WhL5PbOMg4 ++ cat /tmp/tmp.PVz48YcUkv ++ rm /tmp/tmp.WhL5PbOMg4 /tmp/tmp.PVz48YcUkv ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-mongos-2 + local pod=some-name-mongos-2 + set +o xtrace waiting for pod/some-name-mongos-2 to be ready......OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MTuJDPxL6I +++ mktemp ++ local LAST_ERR=/tmp/tmp.PGjQG5Z7Iw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MTuJDPxL6I ++ cat /tmp/tmp.PGjQG5Z7Iw ++ rm /tmp/tmp.MTuJDPxL6I /tmp/tmp.PGjQG5Z7Iw ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + sleep 10 + compare_mongos_cmd find myApp:myPass@some-name-mongos.pitr-physical-12141 -2nd + local command=find + local uri=myApp:myPass@some-name-mongos.pitr-physical-12141 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + local port=27017 + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.pitr-physical-12141 mongodb '' '' 27017 + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.pitr-physical-12141 + local driver=mongodb + local suffix=.svc.cluster.local + local 
mongo_flag= + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local port=27017 + local mongo_bin=mongo + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CvIxxaYY96 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sWw5XTFsU3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CvIxxaYY96 ++ cat /tmp/tmp.sWw5XTFsU3 ++ rm /tmp/tmp.CvIxxaYY96 /tmp/tmp.sWw5XTFsU3 ++ return 0 + local client_container=psmdb-client-874f474b6-bpwdk + kubectl_bin exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.vsEZ7txE2t ++ mktemp + local LAST_ERR=/tmp/tmp.Tdr6xYR8UQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vsEZ7txE2t + cat /tmp/tmp.Tdr6xYR8UQ + rm /tmp/tmp.vsEZ7txE2t /tmp/tmp.Tdr6xYR8UQ + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/pitr-physical/compare/find-2nd.json /tmp/tmp.BqjbN3Laow/find-2nd + run_backup backup-minio 3 physical + local name=backup-minio + local idx=3 + local type=physical + desc 'run backup backup-minio-3' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio-3 ----------------------------------------------------------------------------------- + /usr/bin/sed -e 's/name:/name: backup-minio-3/' + /usr/bin/sed -e 's/type:/type: physical/' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/pitr-physical/conf/backup-minio.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.bqL4tjwTqk ++ mktemp + local LAST_ERR=/tmp/tmp.ddycmmXmkT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bqL4tjwTqk perconaservermongodbbackup.psmdb.percona.com/backup-minio-3 created + cat /tmp/tmp.ddycmmXmkT + rm /tmp/tmp.bqL4tjwTqk /tmp/tmp.ddycmmXmkT + return 0 + wait_backup backup-minio-3 + local backup_name=backup-minio-3 + local target_state=ready + set +o xtrace waiting for backup-minio-3 to reach ready state........... 
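The compare_mongos_cmd step above reduces to: run db.test.find() through mongos from the psmdb-client pod, strip volatile output, and diff against a stored golden file. A rough sketch of that pipeline under the same assumptions (the noise filter and sed normalisation are copied from the trace; paths are shown relative to the repo checkout rather than the Jenkins workspace):

run_mongos_find() {
    local uri=$1 client_pod
    # Find any psmdb-client pod and pipe the shell commands into mongo over mongos.
    client_pod=$(kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}')
    kubectl exec "${client_pod}" -- bash -c \
        "printf 'use myApp\n db.test.find()\n' | mongo mongodb://${uri}.svc.cluster.local:27017/admin"
}

compare_mongos_find() {
    local uri=$1 postfix=$2 tmp_dir=$3
    # Drop shell banners/warnings and volatile fields (ObjectIds, pod ordinals) before diffing.
    run_mongos_find "${uri}" \
        | egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match' \
        | sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' \
        > "${tmp_dir}/find${postfix}"
    diff "e2e-tests/pitr-physical/compare/find${postfix}.json" "${tmp_dir}/find${postfix}"
}

An empty diff against find-2nd.json confirms the restore by date produced the expected documents before the test moves on to backup-minio-3.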
+ sleep 5 + compare_latest_restorable_time some-name-rs0 backup-minio-3 + local cluster=some-name-rs0 + local backup_name=backup-minio-3 + local latest_restorable_time + local backup_time ++ get_latest_restorable_time some-name-rs0 ++ local cluster=some-name-rs0 ++ local first_timestamp ++ local second_timestamp ++ local retry=0 ++ [[ '' != '' ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.YlNefQ6ezU ++++ mktemp +++ local LAST_ERR=/tmp/tmp.xEU2qF4kS7 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.YlNefQ6ezU +++ cat /tmp/tmp.xEU2qF4kS7 +++ rm /tmp/tmp.YlNefQ6ezU /tmp/tmp.xEU2qF4kS7 +++ return 0 ++ first_timestamp=1742787864 ++ sleep 5 ++ [[ 1742787864 != '' ]] ++ [[ 1742787864 != \n\u\l\l ]] +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Tkq32hw12K ++++ mktemp +++ local LAST_ERR=/tmp/tmp.odJK76zaM3 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.Tkq32hw12K +++ cat /tmp/tmp.odJK76zaM3 +++ rm /tmp/tmp.Tkq32hw12K /tmp/tmp.odJK76zaM3 +++ return 0 ++ second_timestamp=1742787864 ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 1742787864 != '' ]] ++ [[ 1742787864 != \n\u\l\l ]] ++ [[ 1742787864 == 1742787864 ]] ++ /usr/bin/date -u -d @1742787864 +%Y-%m-%dT%H:%M:%SZ + latest_restorable_time=2025-03-24T03:44:24Z ++ get_latest_restorable_time_from_backup_object backup-minio-3 ++ local backup_name=backup-minio-3 ++ local latestRestorableTime ++ local retry=0 ++ [[ '' != '' ]] ++ sleep 5 +++ kubectl_bin get psmdb-backup backup-minio-3 -o 'jsonpath={.status.latestRestorableTime}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.cE8DFDvQyr ++++ mktemp +++ local LAST_ERR=/tmp/tmp.vaQtyP45WB +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb-backup backup-minio-3 -o 'jsonpath={.status.latestRestorableTime}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.cE8DFDvQyr +++ cat /tmp/tmp.vaQtyP45WB +++ rm /tmp/tmp.cE8DFDvQyr /tmp/tmp.vaQtyP45WB +++ return 0 ++ latestRestorableTime=2025-03-24T03:44:24Z ++ let retry+=1 ++ [[ 1 -gt 30 ]] ++ [[ 2025-03-24T03:44:24Z != '' ]] ++ [[ 2025-03-24T03:44:24Z != \n\u\l\l ]] ++ echo 2025-03-24T03:44:24Z + backup_time=2025-03-24T03:44:24Z + [[ 2025-03-24T03:44:24Z != \2\0\2\5\-\0\3\-\2\4\T\0\3\:\4\4\:\2\4\Z ]] + check_recovery backup-minio-3 latest '' -3rd some-name + local backup_name=backup-minio-3 + local restore_type=latest + local restore_date= + local cmp_postfix=-3rd + local cluster_name=some-name ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.845r3wccd6 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.UL0usu8TD1 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent 
-- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.845r3wccd6 +++ cat /tmp/tmp.UL0usu8TD1 +++ rm /tmp/tmp.845r3wccd6 /tmp/tmp.UL0usu8TD1 +++ return 0 ++ echo 1742787864 + local latest_ts=1742787864 + desc 'write more data before restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- write more data before restore by latest ----------------------------------------------------------------------------------- + run_mongos 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-mongos.pitr-physical-12141 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-mongos.pitr-physical-12141 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.86DLKTlC1m +++ mktemp ++ local LAST_ERR=/tmp/tmp.uZ1NmfViOv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.86DLKTlC1m ++ cat /tmp/tmp.uZ1NmfViOv ++ rm /tmp/tmp.86DLKTlC1m /tmp/tmp.uZ1NmfViOv ++ return 0 + local client_container=psmdb-client-874f474b6-bpwdk + kubectl_bin exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.yY8aCgHibW ++ mktemp + local LAST_ERR=/tmp/tmp.6cuMDIexJL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yY8aCgHibW Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("7cfa38de-b166-42d9-8cbc-5963e259fb09") } Percona Server for MongoDB server version: v8.0.4-2 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.6cuMDIexJL + rm /tmp/tmp.yY8aCgHibW /tmp/tmp.6cuMDIexJL + return 0 + [[ -n '' ]] + desc 'Restoring to latest' + set +o xtrace ----------------------------------------------------------------------------------- Restoring to latest ----------------------------------------------------------------------------------- ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.YYWDgOACYc ++++ mktemp +++ local LAST_ERR=/tmp/tmp.ExP6Qs3s3H +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 
'!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.YYWDgOACYc +++ cat /tmp/tmp.ExP6Qs3s3H +++ rm /tmp/tmp.YYWDgOACYc /tmp/tmp.ExP6Qs3s3H +++ return 0 ++ echo 1742787864 + local current_ts=1742787864 + retries=0 + [[ 1742787864 -gt 1742787864 ]] + [[ 0 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.V77ZCz2cNY ++++ mktemp +++ local LAST_ERR=/tmp/tmp.mO5jPvhS7w +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.V77ZCz2cNY +++ cat /tmp/tmp.mO5jPvhS7w +++ rm /tmp/tmp.V77ZCz2cNY /tmp/tmp.mO5jPvhS7w +++ return 0 ++ echo 1742787864 + latest_ts=1742787864 + retries=1 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 + echo 'Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24)' Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24) + sleep 10 + [[ 1742787864 -gt 1742787864 ]] + [[ 1 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.U8M6hQNSJY ++++ mktemp +++ local LAST_ERR=/tmp/tmp.OpWqd9emGk +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.U8M6hQNSJY +++ cat /tmp/tmp.OpWqd9emGk +++ rm /tmp/tmp.U8M6hQNSJY /tmp/tmp.OpWqd9emGk +++ return 0 ++ echo 1742787864 + latest_ts=1742787864 + retries=2 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 + echo 'Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24)' Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24) + sleep 10 + [[ 1742787864 -gt 1742787864 ]] + [[ 2 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Qp6Erv1ze3 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.OiUwnrtIsf +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.Qp6Erv1ze3 +++ cat /tmp/tmp.OiUwnrtIsf +++ rm /tmp/tmp.Qp6Erv1ze3 /tmp/tmp.OiUwnrtIsf +++ return 0 ++ echo 1742787864 + 
latest_ts=1742787864 + retries=3 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 + echo 'Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24)' Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24) + sleep 10 + [[ 1742787864 -gt 1742787864 ]] + [[ 3 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.cfQBi00NmG ++++ mktemp +++ local LAST_ERR=/tmp/tmp.5LCZbuSgy3 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.cfQBi00NmG +++ cat /tmp/tmp.5LCZbuSgy3 +++ rm /tmp/tmp.cfQBi00NmG /tmp/tmp.5LCZbuSgy3 +++ return 0 ++ echo 1742787864 + latest_ts=1742787864 + retries=4 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 + echo 'Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24)' Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24) + sleep 10 + [[ 1742787864 -gt 1742787864 ]] + [[ 4 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json ++++ mktemp +++ local LAST_OUT=/tmp/tmp.MasXEj1wGs ++++ mktemp +++ local LAST_ERR=/tmp/tmp.umiQpHOWkK +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.MasXEj1wGs +++ cat /tmp/tmp.umiQpHOWkK +++ rm /tmp/tmp.MasXEj1wGs /tmp/tmp.umiQpHOWkK +++ return 0 ++ echo 1742787864 + latest_ts=1742787864 + retries=5 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 + echo 'Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24)' Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24) + sleep 10 + [[ 1742787864 -gt 1742787864 ]] + [[ 5 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.MVe3hgIpxH ++++ mktemp +++ local LAST_ERR=/tmp/tmp.SNgSDd75Or +++ 
local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.MVe3hgIpxH +++ cat /tmp/tmp.SNgSDd75Or +++ rm /tmp/tmp.MVe3hgIpxH /tmp/tmp.SNgSDd75Or +++ return 0 ++ echo 1742787864 + latest_ts=1742787864 + retries=6 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 + echo 'Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24)' Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24) + sleep 10 + [[ 1742787864 -gt 1742787864 ]] + [[ 6 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.l1rjTNF0fY ++++ mktemp +++ local LAST_ERR=/tmp/tmp.A1qmhg8XeF +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.l1rjTNF0fY +++ cat /tmp/tmp.A1qmhg8XeF +++ rm /tmp/tmp.l1rjTNF0fY /tmp/tmp.A1qmhg8XeF +++ return 0 ++ echo 1742787864 + latest_ts=1742787864 + retries=7 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 + echo 'Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24)' Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24) + sleep 10 + [[ 1742787864 -gt 1742787864 ]] + [[ 7 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.nJ9ou4Qp45 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.lhpEqEZfAR +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.nJ9ou4Qp45 +++ cat /tmp/tmp.lhpEqEZfAR +++ rm /tmp/tmp.nJ9ou4Qp45 /tmp/tmp.lhpEqEZfAR +++ return 0 ++ echo 1742787864 + latest_ts=1742787864 + retries=8 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 + echo 'Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24)' Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24) + 
sleep 10 + [[ 1742787864 -gt 1742787864 ]] + [[ 8 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.jLkjV01qXB ++++ mktemp +++ local LAST_ERR=/tmp/tmp.4wA7XTUTZL +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.jLkjV01qXB +++ cat /tmp/tmp.4wA7XTUTZL +++ rm /tmp/tmp.jLkjV01qXB /tmp/tmp.4wA7XTUTZL +++ return 0 ++ echo 1742787864 + latest_ts=1742787864 + retries=9 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 + echo 'Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24)' Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24) + sleep 10 + [[ 1742787864 -gt 1742787864 ]] + [[ 9 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.spjAZUyNEU ++++ mktemp +++ local LAST_ERR=/tmp/tmp.ALPZHPMiI0 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.spjAZUyNEU +++ cat /tmp/tmp.ALPZHPMiI0 +++ rm /tmp/tmp.spjAZUyNEU /tmp/tmp.ALPZHPMiI0 +++ return 0 ++ echo 1742787864 + latest_ts=1742787864 + retries=10 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 ++ format_date 1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 + echo 'Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24)' Waiting for last oplog chunk (2025-03-24 03:44:24) to be 120 seconds older than starting chunk (2025-03-24 03:44:24) + sleep 10 + [[ 1742787864 -gt 1742787864 ]] + [[ 10 -gt 30 ]] ++ get_latest_oplog_chunk_ts some-name ++ local cluster=some-name +++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ jq '.backups.pitrChunks.pitrChunks | last | .range.end' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.hGXXe31Uto ++++ mktemp +++ local LAST_ERR=/tmp/tmp.ARqXJDmnjo +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.hGXXe31Uto +++ cat /tmp/tmp.ARqXJDmnjo +++ rm /tmp/tmp.hGXXe31Uto /tmp/tmp.ARqXJDmnjo +++ return 0 ++ echo 1742789223 + latest_ts=1742789223 + retries=11 ++ format_date 1742789223 ++ local timestamp=1742789223 +++ TZ=UTC +++ /usr/bin/date -d@1742789223 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 04:07:03 ++ format_date 
1742787864 ++ local timestamp=1742787864 +++ TZ=UTC +++ /usr/bin/date -d@1742787864 '+%Y-%m-%d %H:%M:%S' ++ echo 2025-03-24 03:44:24 + echo 'Waiting for last oplog chunk (2025-03-24 04:07:03) to be 120 seconds older than starting chunk (2025-03-24 03:44:24)' Waiting for last oplog chunk (2025-03-24 04:07:03) to be 120 seconds older than starting chunk (2025-03-24 03:44:24) + sleep 10 + [[ 1742789223 -gt 1742787864 ]] + desc 'check restore by latest' + set +o xtrace ----------------------------------------------------------------------------------- check restore by latest ----------------------------------------------------------------------------------- + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/pitr-physical/conf/restore.yml + /usr/bin/sed -e 's/name:/name: restore-backup-minio-3/' + '[' -z '' ']' + /usr/bin/sed -e /date:/d + /usr/bin/sed -e 's/type:/type: latest/' + kubectl_bin apply -f - ++ mktemp + /usr/bin/sed -e 's/backupName:/backupName: backup-minio-3/' + local LAST_OUT=/tmp/tmp.0LkGttzPxi ++ mktemp + local LAST_ERR=/tmp/tmp.MspNwPEWU0 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0LkGttzPxi perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-3 created + cat /tmp/tmp.MspNwPEWU0 + rm /tmp/tmp.0LkGttzPxi /tmp/tmp.MspNwPEWU0 + return 0 + wait_restore backup-minio-3 some-name requested 0 900 + local backup_name=backup-minio-3 + local cluster_name=some-name + local target_state=requested + local wait_cluster_consistency=0 + local wait_time=900 + local ok_if_ready=0 + set +o xtrace waiting psmdb-restore/restore-backup-minio-3 to reach requested state.................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................OK + '[' 0 -eq 1 ']' + echo + wait_restore backup-minio-3 some-name ready 0 1600 + local backup_name=backup-minio-3 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=0 + local wait_time=1600 + local ok_if_ready=0 + set +o xtrace waiting psmdb-restore/restore-backup-minio-3 to reach ready state................................................................................................................OK + '[' 0 -eq 1 ']' + echo + set -o xtrace + wait_for_running some-name-mongos 3 + local name=some-name-mongos + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=mongos + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-mongos-0 + local pod=some-name-mongos-0 + set +o xtrace waiting for pod/some-name-mongos-0 to be ready....................OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-mongos-1 + local pod=some-name-mongos-1 + set +o xtrace waiting for pod/some-name-mongos-1 to be ready......OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yh1QPYKYnw +++ mktemp 
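Both restores above (restore-backup-minio-2 by date and restore-backup-minio-3 to latest) are created by patching the same restore.yml template with sed and then polling the PerconaServerMongoDBRestore object until it reaches requested and finally ready. A condensed sketch of that flow; the template path is shown relative to the repo, and the .status.state polling is an assumption about what wait_restore does behind the dots printed above:

apply_pitr_restore() {
    local backup_name=$1 restore_type=$2 restore_date=$3   # restore_date is empty for type=latest
    cat e2e-tests/pitr-physical/conf/restore.yml \
        | sed -e "s/name:/name: restore-${backup_name}/" \
              -e "s/backupName:/backupName: ${backup_name}/" \
              -e "s/type:/type: ${restore_type}/" \
        | if [[ -n ${restore_date} ]]; then
              # Restore by date: inject the target as a formatted UTC timestamp.
              sed -e "s/date:/date: $(TZ=UTC date -d "@${restore_date}" '+%Y-%m-%d %H:%M:%S')/"
          else
              # Restore to latest: the date field is removed entirely.
              sed -e '/date:/d'
          fi \
        | kubectl apply -f -
}

wait_restore_state() {
    local restore=$1 target_state=$2 timeout=${3:-900} waited=0
    until [[ $(kubectl get psmdb-restore "${restore}" -o 'jsonpath={.status.state}') == "${target_state}" ]]; do
        sleep 1
        waited=$((waited + 1))
        [[ ${waited} -gt ${timeout} ]] && { echo "restore/${restore} did not reach ${target_state}" >&2; return 1; }
    done
}

Physical restores pass through requested before ready, which is why the trace waits for both states with generous timeouts (900s for requested, 1600s for ready).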
++ local LAST_ERR=/tmp/tmp.hMIdolN7WV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yh1QPYKYnw ++ cat /tmp/tmp.hMIdolN7WV ++ rm /tmp/tmp.yh1QPYKYnw /tmp/tmp.hMIdolN7WV ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-mongos-2 + local pod=some-name-mongos-2 + set +o xtrace waiting for pod/some-name-mongos-2 to be ready.......OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Pf09Ko0aPN +++ mktemp ++ local LAST_ERR=/tmp/tmp.ukJwShdtlX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="mongos")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Pf09Ko0aPN ++ cat /tmp/tmp.ukJwShdtlX ++ rm /tmp/tmp.Pf09Ko0aPN /tmp/tmp.ukJwShdtlX ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + sleep 10 + compare_mongos_cmd find myApp:myPass@some-name-mongos.pitr-physical-12141 -3rd + local command=find + local uri=myApp:myPass@some-name-mongos.pitr-physical-12141 + local postfix=-3rd + local suffix= + local database=myApp + local collection=test + local port=27017 + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.pitr-physical-12141 mongodb '' '' 27017 + local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-mongos.pitr-physical-12141 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local port=27017 + local mongo_bin=mongo + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ echo .svc.cluster.local ++ awk -F: '{print $2}' + suffix_port= + [[ -z '' ]] + suffix=.svc.cluster.local:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cNe5FosrKA +++ mktemp ++ local LAST_ERR=/tmp/tmp.miIt55ScyL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cNe5FosrKA ++ cat /tmp/tmp.miIt55ScyL ++ rm /tmp/tmp.cNe5FosrKA /tmp/tmp.miIt55ScyL ++ return 0 + local client_container=psmdb-client-874f474b6-bpwdk + kubectl_bin exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.AhsWu4w3oP ++ mktemp + local LAST_ERR=/tmp/tmp.cI3iarhvB1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-874f474b6-bpwdk -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.pitr-physical-12141.svc.cluster.local:27017/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AhsWu4w3oP + cat /tmp/tmp.cI3iarhvB1 + rm /tmp/tmp.AhsWu4w3oP 
/tmp/tmp.cI3iarhvB1 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/e2e-tests/pitr-physical/compare/find-3rd.json /tmp/tmp.BqjbN3Laow/find-3rd + destroy pitr-physical-12141 + local namespace=pitr-physical-12141 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.i52ha9GXBp ++ mktemp + local LAST_ERR=/tmp/tmp.IS9X5pKTZW + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.i52ha9GXBp customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.IS9X5pKTZW + rm /tmp/tmp.i52ha9GXBp /tmp/tmp.IS9X5pKTZW + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.TjLgKhPjCv ++ mktemp + local LAST_ERR=/tmp/tmp.2ogiHZWV73 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TjLgKhPjCv + cat /tmp/tmp.2ogiHZWV73 + rm /tmp/tmp.TjLgKhPjCv /tmp/tmp.2ogiHZWV73 + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the 
server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.F4G9YepTtP ++ mktemp + local LAST_ERR=/tmp/tmp.F3wbBbvcB7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.F4G9YepTtP + cat /tmp/tmp.F3wbBbvcB7 + rm /tmp/tmp.F4G9YepTtP /tmp/tmp.F3wbBbvcB7 + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.Hxjp8X5pGd ++ mktemp + local LAST_ERR=/tmp/tmp.PwjnfrFR15 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Hxjp8X5pGd + cat /tmp/tmp.PwjnfrFR15 + rm /tmp/tmp.Hxjp8X5pGd /tmp/tmp.PwjnfrFR15 + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.HDeW9xJCk1 ++ mktemp + local LAST_ERR=/tmp/tmp.9kIZ13yKZB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1872/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.HDeW9xJCk1 clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.9kIZ13yKZB + rm /tmp/tmp.HDeW9xJCk1 /tmp/tmp.9kIZ13yKZB + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.W8uiXhCE8V ++ mktemp + local LAST_ERR=/tmp/tmp.DzdjlMercl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.W8uiXhCE8V + cat /tmp/tmp.DzdjlMercl Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat 
/tmp/tmp.W8uiXhCE8V + cat /tmp/tmp.DzdjlMercl (same "Error from server (NotFound)" messages for every cert-manager resource as listed above) + sleep 4 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.W8uiXhCE8V + cat /tmp/tmp.DzdjlMercl (same "Error from server (NotFound)" messages as above) + sleep 8 + cat /tmp/tmp.W8uiXhCE8V + cat /tmp/tmp.DzdjlMercl (same "Error from server (NotFound)" messages as above) + rm /tmp/tmp.W8uiXhCE8V /tmp/tmp.DzdjlMercl + return 1 + true + '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace pitr-physical-12141 + rm -rf /tmp/tmp.BqjbN3Laow + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.Bb5eY2gYVo + local LAST_OUT=/tmp/tmp.9x7YcQy2Ok ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.w6uTVeluKF + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.9pY1zKfN23 + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pitr-physical-12141 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator