Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/logs/demand-backup.log Warning: version difference between client (1.35) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.35) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.35) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra demand-backup-19332 + local ns=demand-backup-19332 + [[ 1 == 1 ]] + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.FgRUaNRdu3 ++ mktemp + local LAST_ERR=/tmp/tmp.pSkhTsvDDb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FgRUaNRdu3 customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.pSkhTsvDDb + rm /tmp/tmp.FgRUaNRdu3 /tmp/tmp.pSkhTsvDDb + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n demand-backup-8269 backup-azure-blob --type=merge -p '{"metadata":{"finalizers":[]}}' Error from server (NotFound): perconaservermongodbbackups.psmdb.percona.com "backup-azure-blob" not found + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n demand-backup-8269 backup-minio --type=merge -p '{"metadata":{"finalizers":[]}}' Error from server (NotFound): perconaservermongodbbackups.psmdb.percona.com "backup-minio" not found + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.lpFYfnUo7C ++ mktemp + local LAST_ERR=/tmp/tmp.J4F6KrjQ6Z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lpFYfnUo7C customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com condition met + cat /tmp/tmp.J4F6KrjQ6Z + rm /tmp/tmp.lpFYfnUo7C /tmp/tmp.J4F6KrjQ6Z + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch 
perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.CFTnulr9oh ++ mktemp + local LAST_ERR=/tmp/tmp.6E4QxMFAFl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CFTnulr9oh + cat /tmp/tmp.6E4QxMFAFl + rm /tmp/tmp.CFTnulr9oh /tmp/tmp.6E4QxMFAFl + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.wMj5dIbsvr ++ mktemp + local LAST_ERR=/tmp/tmp.g6RQrLCQKx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wMj5dIbsvr + cat /tmp/tmp.g6RQrLCQKx + rm /tmp/tmp.wMj5dIbsvr /tmp/tmp.g6RQrLCQKx + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.dK1PKF11Bd ++ mktemp + local LAST_ERR=/tmp/tmp.ec1tJapbYp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dK1PKF11Bd clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.ec1tJapbYp + rm /tmp/tmp.dK1PKF11Bd /tmp/tmp.ec1tJapbYp + return 0 + check_crd_for_deletion PR-1961-7f9b8fd6 + local git_tag=PR-1961-7f9b8fd6 ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1961-7f9b8fd6/deploy/crd.yaml ++ yq eval .metadata.name ++ /usr/sbin/sed s/---//g ++ /usr/sbin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in $(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '.metadata.name' | $sed 's/---//g' | $sed ':a;N;$!ba;s/\n/ /g') ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vKrfJjj0n9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sNvbtqRBZR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in 
$(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.vKrfJjj0n9 ++ cat /tmp/tmp.sNvbtqRBZR Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.vKrfJjj0n9 ++ cat /tmp/tmp.sNvbtqRBZR Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.vKrfJjj0n9 ++ cat /tmp/tmp.sNvbtqRBZR Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.vKrfJjj0n9 ++ cat /tmp/tmp.sNvbtqRBZR Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.vKrfJjj0n9 /tmp/tmp.sNvbtqRBZR ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' Unable to connect to the server: net/http: TLS handshake timeout + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' Unable to connect to the server: net/http: TLS handshake timeout + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' E0112 21:20:01.817839 134036 memcache.go:265] "Unhandled Error" err="couldn't get current server API group list: Get \"https://34.56.119.228/api?timeout=32s\": net/http: TLS handshake timeout" Unable to connect to the server: net/http: TLS handshake timeout ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' Unable to connect to the server: net/http: TLS handshake timeout + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces 
----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' ++ mktemp + set +o xtrace + xargs kubectl delete ns ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.oOtfUhBegl ++ mktemp + local LAST_OUT=/tmp/tmp.xcrBJnJgEo ++ mktemp + local LAST_ERR=/tmp/tmp.otMEQ3A8d6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.Aij6Yl99Iy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + for i in $(seq 0 2) + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oOtfUhBegl + cat /tmp/tmp.otMEQ3A8d6 + rm /tmp/tmp.oOtfUhBegl /tmp/tmp.otMEQ3A8d6 + return 0 namespace "demand-backup-8269" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xcrBJnJgEo namespace "psmdb-operator" deleted + cat /tmp/tmp.Aij6Yl99Iy + rm /tmp/tmp.xcrBJnJgEo /tmp/tmp.Aij6Yl99Iy + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.83MiDaqlRn ++ mktemp + local LAST_ERR=/tmp/tmp.NDrJETqPW9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.83MiDaqlRn + cat /tmp/tmp.NDrJETqPW9 + rm /tmp/tmp.83MiDaqlRn /tmp/tmp.NDrJETqPW9 + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.75cCGi2ORk ++ mktemp + local LAST_ERR=/tmp/tmp.id2AhMoXu1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.75cCGi2ORk namespace/psmdb-operator created + cat /tmp/tmp.id2AhMoXu1 + rm /tmp/tmp.75cCGi2ORk /tmp/tmp.id2AhMoXu1 + return 0 + set_kube_ctx psmdb-operator + local namespace=psmdb-operator ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.xacZeIjLl9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6DGq5pHOP5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xacZeIjLl9 ++ cat /tmp/tmp.6DGq5pHOP5 ++ rm /tmp/tmp.xacZeIjLl9 /tmp/tmp.6DGq5pHOP5 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1961-7f9b8fd6-3-cluster10 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.gSkEAS93FK ++ mktemp + local LAST_ERR=/tmp/tmp.0m84MO3vhe + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1961-7f9b8fd6-3-cluster10 --namespace=psmdb-operator + exit_status=0 + 
set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gSkEAS93FK Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1961-7f9b8fd6-3-cluster10" modified. + cat /tmp/tmp.0m84MO3vhe + rm /tmp/tmp.gSkEAS93FK /tmp/tmp.0m84MO3vhe + return 0 + deploy_operator + desc 'start PSMDB operator: perconalab/percona-server-mongodb-operator:PR-1961-7f9b8fd6' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator: perconalab/percona-server-mongodb-operator:PR-1961-7f9b8fd6 ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.lcBSoeM1V7 ++ mktemp + local LAST_ERR=/tmp/tmp.Hi8Ziw2ihJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lcBSoeM1V7 customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.Hi8Ziw2ihJ + rm /tmp/tmp.lcBSoeM1V7 /tmp/tmp.Hi8Ziw2ihJ + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: psmdb-operator^' + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.9fIaR0o4N6 ++ mktemp + local LAST_ERR=/tmp/tmp.hm7ERZ0vJR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9fIaR0o4N6 clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.hm7ERZ0vJR + rm /tmp/tmp.9fIaR0o4N6 /tmp/tmp.hm7ERZ0vJR + return 0 + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1961-7f9b8fd6") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/deploy/cw-operator.yaml + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.2QmIAHseag ++ mktemp + local LAST_ERR=/tmp/tmp.uNNZ579pGM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2QmIAHseag deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.uNNZ579pGM + rm /tmp/tmp.2QmIAHseag /tmp/tmp.uNNZ579pGM + return 0 + sleep 20 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.YbqZhP6yVk +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZZPzX9vvJF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YbqZhP6yVk ++ cat /tmp/tmp.ZZPzX9vvJF ++ rm /tmp/tmp.YbqZhP6yVk /tmp/tmp.ZZPzX9vvJF ++ return 0 + wait_operator_pod percona-server-mongodb-operator-7b94977d54-h4f97 + local pod=percona-server-mongodb-operator-7b94977d54-h4f97 + set +o xtrace waiting for pod/percona-server-mongodb-operator-7b94977d54-h4f97 to be ready.OK + echo 'Print operator info from log' Print operator info from log + grep 'Manager starting up' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.0IkvZoP9Rm +++ mktemp ++ local LAST_ERR=/tmp/tmp.059J5knFHL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0IkvZoP9Rm ++ cat /tmp/tmp.059J5knFHL ++ rm /tmp/tmp.0IkvZoP9Rm /tmp/tmp.059J5knFHL ++ return 0 + kubectl_bin logs -n psmdb-operator percona-server-mongodb-operator-7b94977d54-h4f97 ++ mktemp + local LAST_OUT=/tmp/tmp.hU7iEjw3DT ++ mktemp + local LAST_ERR=/tmp/tmp.t0IlM4ceJD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs -n psmdb-operator percona-server-mongodb-operator-7b94977d54-h4f97 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hU7iEjw3DT + cat /tmp/tmp.t0IlM4ceJD + rm /tmp/tmp.hU7iEjw3DT /tmp/tmp.t0IlM4ceJD + return 0 2026-01-12T21:20:47.331Z INFO setup Manager starting up {"gitCommit": "7f9b8fd660ef537a8ff13f3505cffba918719e96", "gitBranch": "PR-1961-7f9b8fd6", "buildTime": "", "goVersion": "go1.25.5", "os": "linux", "arch": "amd64"} + create_namespace demand-backup-19332 + local namespace=demand-backup-19332 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk 
'{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' ++ mktemp + local LAST_OUT=/tmp/tmp.oHbEqmKFoc ++ mktemp + '[' -n '' ']' + desc 'cleaned up old namespaces demand-backup-19332' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces demand-backup-19332 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace demand-backup-19332 --ignore-not-found + awk '{print$1}' + local LAST_ERR=/tmp/tmp.EPaXgGaGak + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.ANcVbpWYMs ++ mktemp + local LAST_ERR=/tmp/tmp.QhJWRqecUK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace demand-backup-19332 --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oHbEqmKFoc + cat /tmp/tmp.EPaXgGaGak + rm /tmp/tmp.oHbEqmKFoc /tmp/tmp.EPaXgGaGak + return 0 error: resource(s) were provided, but no name was specified + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ANcVbpWYMs + cat /tmp/tmp.QhJWRqecUK + rm /tmp/tmp.ANcVbpWYMs /tmp/tmp.QhJWRqecUK + return 0 + kubectl_bin wait --for=delete namespace demand-backup-19332 ++ mktemp + local LAST_OUT=/tmp/tmp.daOwdPg3Mg ++ mktemp + local LAST_ERR=/tmp/tmp.0YWd0FPI8h + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace demand-backup-19332 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.daOwdPg3Mg + cat /tmp/tmp.0YWd0FPI8h + rm /tmp/tmp.daOwdPg3Mg /tmp/tmp.0YWd0FPI8h + return 0 + desc 'create namespace demand-backup-19332' + set +o xtrace ----------------------------------------------------------------------------------- create namespace demand-backup-19332 ----------------------------------------------------------------------------------- + kubectl_bin create namespace demand-backup-19332 ++ mktemp + local 
LAST_OUT=/tmp/tmp.mCRppL5KCw ++ mktemp + local LAST_ERR=/tmp/tmp.x5jpXL6w2g + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace demand-backup-19332 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mCRppL5KCw namespace/demand-backup-19332 created + cat /tmp/tmp.x5jpXL6w2g + rm /tmp/tmp.mCRppL5KCw /tmp/tmp.x5jpXL6w2g + return 0 + set_kube_ctx demand-backup-19332 + local namespace=demand-backup-19332 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.asYFgAod4Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.L0yHC6xof5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.asYFgAod4Y ++ cat /tmp/tmp.L0yHC6xof5 ++ rm /tmp/tmp.asYFgAod4Y /tmp/tmp.L0yHC6xof5 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1961-7f9b8fd6-3-cluster10 --namespace=demand-backup-19332 ++ mktemp + local LAST_OUT=/tmp/tmp.6jH11ewbXo ++ mktemp + local LAST_ERR=/tmp/tmp.C3PNbPpV6p + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1961-7f9b8fd6-3-cluster10 --namespace=demand-backup-19332 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6jH11ewbXo Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1961-7f9b8fd6-3-cluster10" modified. + cat /tmp/tmp.C3PNbPpV6p + rm /tmp/tmp.6jH11ewbXo /tmp/tmp.C3PNbPpV6p + return 0 + deploy_minio + desc 'install Minio' + set +o xtrace ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- + helm uninstall minio-service Error: uninstall: Release not loaded: minio-service: release: not found + : + helm repo remove minio "minio" has been removed from your repositories + helm repo add minio https://charts.min.io/ "minio" has been added to your repositories + retry 10 60 helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio + local max=10 + local delay=60 + shift 2 + local n=1 + helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio NAME: minio-service LAST DEPLOYED: Mon Jan 12 21:21:25 2026 NAMESPACE: demand-backup-19332 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.demand-backup-19332.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace demand-backup-19332 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. 
kubectl port-forward $POD_NAME 9000 --namespace demand-backup-19332 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace demand-backup-19332 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace demand-backup-19332 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. mc ls minio-service-local ++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eLEdIzU1ml +++ mktemp ++ local LAST_ERR=/tmp/tmp.S892BEKWtu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eLEdIzU1ml ++ cat /tmp/tmp.S892BEKWtu ++ rm /tmp/tmp.eLEdIzU1ml /tmp/tmp.S892BEKWtu ++ return 0 + MINIO_POD=minio-service-d9589b474-bvls4 + wait_pod minio-service-d9589b474-bvls4 + local pod=minio-service-d9589b474-bvls4 + set +o xtrace waiting for pod/minio-service-d9589b474-bvls4 to be ready.OK + '[' -n psmdb-operator ']' + kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.demand-backup-19332.svc.cluster.local --tcp=9000 ++ mktemp + local LAST_OUT=/tmp/tmp.9IbdhogOlj ++ mktemp + local LAST_ERR=/tmp/tmp.2bHJwpUeJH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create svc -n psmdb-operator externalname minio-service --external-name=minio-service.demand-backup-19332.svc.cluster.local --tcp=9000 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9IbdhogOlj service/minio-service created + cat /tmp/tmp.2bHJwpUeJH + rm /tmp/tmp.9IbdhogOlj /tmp/tmp.2bHJwpUeJH + return 0 + create_minio_bucket operator-testing + local bucket=operator-testing + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' ++ mktemp + local LAST_OUT=/tmp/tmp.GYz0rC3apf ++ mktemp + local LAST_ERR=/tmp/tmp.2lW1TdtiS5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GYz0rC3apf make_bucket: operator-testing pod "aws-cli" deleted from demand-backup-19332 namespace + cat /tmp/tmp.2lW1TdtiS5 All commands and output from this session will be recorded in container logs, including credentials and sensitive information passed through the command prompt. If you don't see a command prompt, try pressing enter. 
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-19332 + rm /tmp/tmp.GYz0rC3apf /tmp/tmp.2lW1TdtiS5 + return 0 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + cluster=some-name-rs0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.U3Qedi6Y9L ++ mktemp + local LAST_ERR=/tmp/tmp.A3goMb1qTD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.U3Qedi6Y9L secret/some-users created deployment.apps/psmdb-client created + cat /tmp/tmp.A3goMb1qTD + rm /tmp/tmp.U3Qedi6Y9L /tmp/tmp.A3goMb1qTD + return 0 + apply_s3_storage_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.kLmSidcuIY ++ mktemp + local LAST_ERR=/tmp/tmp.xgdq2p8Fr0 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kLmSidcuIY secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created secret/gcp-cs-sa-key-secret created + cat /tmp/tmp.xgdq2p8Fr0 + rm /tmp/tmp.kLmSidcuIY /tmp/tmp.xgdq2p8Fr0 + return 0 + desc 'create first PSMDB cluster some-name-rs0' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster some-name-rs0 ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/some-name-rs0.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/some-name-rs0.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/some-name-rs0.yml + yq eval '(.spec | select(has("pmm"))).pmm.image = "percona/pmm-client:2.44.1-1"' + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod8.0"' ++ mktemp + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1961-7f9b8fd6"' + yq eval '(.spec | select(has("backup"))).backup.image = 
"perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_OUT=/tmp/tmp.TiY6LG1C3o + /usr/sbin/sed -e s/NAME_SPACE/demand-backup-19332/g ++ mktemp + local LAST_ERR=/tmp/tmp.h7UCaFKc4V + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TiY6LG1C3o perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.h7UCaFKc4V + rm /tmp/tmp.TiY6LG1C3o /tmp/tmp.h7UCaFKc4V + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.....OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready......OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aU2uYPArFv +++ mktemp ++ local LAST_ERR=/tmp/tmp.aBe2FlwFNO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.aU2uYPArFv ++ cat /tmp/tmp.aBe2FlwFNO ++ rm /tmp/tmp.aU2uYPArFv /tmp/tmp.aBe2FlwFNO ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready......OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eHqJc7DCsL +++ mktemp ++ local LAST_ERR=/tmp/tmp.kwkLv5EMRE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eHqJc7DCsL ++ cat /tmp/tmp.kwkLv5EMRE ++ rm /tmp/tmp.eHqJc7DCsL /tmp/tmp.kwkLv5EMRE ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wCIxVf7SdE +++ mktemp ++ local LAST_ERR=/tmp/tmp.g7TegD8RTr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wCIxVf7SdE ++ cat /tmp/tmp.g7TegD8RTr ++ rm /tmp/tmp.wCIxVf7SdE /tmp/tmp.g7TegD8RTr ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness................. 
+ desc 'check if service and statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/some-name-rs0 + local resource=statefulset/some-name-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/statefulset_some-name-rs0.yml + local new_result=/tmp/tmp.lfGJl3onxJ/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/statefulset_some-name-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("demand-backup-19332", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.LJZN4JIM96 ++ mktemp + local LAST_ERR=/tmp/tmp.X0zCm7yAkH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LJZN4JIM96 + cat /tmp/tmp.X0zCm7yAkH + rm /tmp/tmp.LJZN4JIM96 /tmp/tmp.X0zCm7yAkH + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.lfGJl3onxJ/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.lfGJl3onxJ/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.lfGJl3onxJ/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/statefulset_some-name-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/statefulset_some-name-rs0.yml /tmp/tmp.lfGJl3onxJ/statefulset_some-name-rs0.yml + log 'compare_kubectl: statefulset/some-name-rs0 OK' + set +o xtrace [2026-01-12T21:24:04+0000] compare_kubectl: statefulset/some-name-rs0 OK + desc 'create user' + set +o xtrace ----------------------------------------------------------------------------------- create user ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b9SrCZoVul +++ mktemp ++ local LAST_ERR=/tmp/tmp.SomTZtQNCw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.b9SrCZoVul ++ cat /tmp/tmp.SomTZtQNCw ++ rm /tmp/tmp.b9SrCZoVul /tmp/tmp.SomTZtQNCw ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.mf3WQszl3W ++ mktemp + local LAST_ERR=/tmp/tmp.pI9R8fcdLP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mf3WQszl3W Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("dce9f226-cc67-4a0e-819c-d56309e97e88") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.pI9R8fcdLP + rm /tmp/tmp.mf3WQszl3W /tmp/tmp.pI9R8fcdLP + return 0 + sleep 2 + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.05I0D1GNRW +++ mktemp ++ local LAST_ERR=/tmp/tmp.qXpB8bXIy2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.05I0D1GNRW ++ cat /tmp/tmp.qXpB8bXIy2 ++ rm /tmp/tmp.05I0D1GNRW /tmp/tmp.qXpB8bXIy2 ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.YfC9aplhg4 ++ mktemp + local LAST_ERR=/tmp/tmp.YLSviYYIx0 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YfC9aplhg4 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("5f56cbe3-47b7-4681-be30-e35631e3be68") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.YLSviYYIx0 + rm /tmp/tmp.YfC9aplhg4 /tmp/tmp.YLSviYYIx0 + return 0 + run_mongo 'use myApp\n db.test2.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.test2.insert({ x: 100501 })' + local 
uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m4d2oUV0rG +++ mktemp ++ local LAST_ERR=/tmp/tmp.46NzvRZtvp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.m4d2oUV0rG ++ cat /tmp/tmp.46NzvRZtvp ++ rm /tmp/tmp.m4d2oUV0rG /tmp/tmp.46NzvRZtvp ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.IHZoS3XhEC ++ mktemp + local LAST_ERR=/tmp/tmp.z5dmLNBjcL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.IHZoS3XhEC Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("743297c3-d88a-42ab-9a1b-466e1510ce7c") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.z5dmLNBjcL + rm /tmp/tmp.IHZoS3XhEC /tmp/tmp.z5dmLNBjcL + return 0 + run_mongo 'use myApp\n db.test3.insert({ x: 100502 })' myApp:myPass@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.test3.insert({ x: 100502 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MgdWIDpDNN +++ mktemp ++ local LAST_ERR=/tmp/tmp.ig5pKuPvOv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MgdWIDpDNN ++ cat /tmp/tmp.ig5pKuPvOv ++ rm /tmp/tmp.MgdWIDpDNN /tmp/tmp.ig5pKuPvOv ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test3.insert({ x: 100502 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.brxdSN8OZJ ++ mktemp + local 
LAST_ERR=/tmp/tmp.bePU4lfuah + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test3.insert({ x: 100502 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.brxdSN8OZJ Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("11466ecf-8a77-49cb-a593-f5a966b6b799") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.bePU4lfuah + rm /tmp/tmp.brxdSN8OZJ /tmp/tmp.bePU4lfuah + return 0 + custom_user_name=test1user + custom_role_name=test1role + run_mongo 'use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})' userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.btRKkU4mGT +++ mktemp ++ local LAST_ERR=/tmp/tmp.niTxQUxXiH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.btRKkU4mGT ++ cat /tmp/tmp.niTxQUxXiH ++ rm /tmp/tmp.btRKkU4mGT /tmp/tmp.niTxQUxXiH ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.zNgP5t88GE ++ mktemp + local LAST_ERR=/tmp/tmp.2bjgB32xNS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zNgP5t88GE Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : 
UUID("4b8c6840-05d5-45bc-a24a-c87caa471249") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp Successfully added user: { "user" : "test1user", "roles" : [ ] } bye + cat /tmp/tmp.2bjgB32xNS + rm /tmp/tmp.zNgP5t88GE /tmp/tmp.2bjgB32xNS + return 0 + run_mongo 'use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})' userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.efNimqhEVW +++ mktemp ++ local LAST_ERR=/tmp/tmp.ckgXmbMz17 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.efNimqhEVW ++ cat /tmp/tmp.ckgXmbMz17 ++ rm /tmp/tmp.efNimqhEVW /tmp/tmp.ckgXmbMz17 ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.JShflcTt4c ++ mktemp + local LAST_ERR=/tmp/tmp.TqVsshJSOY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JShflcTt4c Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("e29bba55-2156-4828-8e1e-0b185f46681d") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp { "role" : "test1role", "privileges" : [ ], "roles" : [ ] } bye + cat /tmp/tmp.TqVsshJSOY + rm /tmp/tmp.JShflcTt4c /tmp/tmp.TqVsshJSOY + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:24:22+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 
'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nEylCqe71Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.MQP6dQ6QaY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nEylCqe71Y ++ cat /tmp/tmp.MQP6dQ6QaY ++ rm /tmp/tmp.nEylCqe71Y /tmp/tmp.MQP6dQ6QaY ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.sv9zFDVntK ++ mktemp + local LAST_ERR=/tmp/tmp.628beJEUcz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sv9zFDVntK + cat /tmp/tmp.628beJEUcz + rm /tmp/tmp.sv9zFDVntK /tmp/tmp.628beJEUcz + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:24:25+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; 
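
# Before diffing against the golden find.json, compare_mongo_cmd scrubs the
# run-specific noise out of the shell output. The grep and sed filters below
# are verbatim from the trace; wrapping them in a named function is an
# assumption made here for readability:
#
normalize_mongo_output() {
    grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' \
        | sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
}
# usage: run_mongo ... | normalize_mongo_output > "$tmp_dir/find"
#        diff -u "$test_dir/compare/find.json" "$tmp_dir/find"
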
s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gKqMn7CCh5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.EXDb7o4dpN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gKqMn7CCh5 ++ cat /tmp/tmp.EXDb7o4dpN ++ rm /tmp/tmp.gKqMn7CCh5 /tmp/tmp.EXDb7o4dpN ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.4a5ixHOnGB ++ mktemp + local LAST_ERR=/tmp/tmp.WI5WDHyiSI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4a5ixHOnGB + cat /tmp/tmp.WI5WDHyiSI + rm /tmp/tmp.4a5ixHOnGB /tmp/tmp.WI5WDHyiSI + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:24:28+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W1stbyIakr +++ mktemp ++ local LAST_ERR=/tmp/tmp.NiDriOij5e ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.W1stbyIakr ++ cat /tmp/tmp.NiDriOij5e ++ rm /tmp/tmp.W1stbyIakr /tmp/tmp.NiDriOij5e ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n 
db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.WFHarA6Ubq ++ mktemp + local LAST_ERR=/tmp/tmp.w4g0haEscj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.WFHarA6Ubq + cat /tmp/tmp.w4g0haEscj + rm /tmp/tmp.WFHarA6Ubq /tmp/tmp.w4g0haEscj + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + wait_backup_agent some-name-rs0-0 + local agent_pod=some-name-rs0-0 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-0...2026-01-12T21:23:28.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-1 + local agent_pod=some-name-rs0-1 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-1...2026-01-12T21:23:57.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-2 + local agent_pod=some-name-rs0-2 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-2...2026-01-12T21:24:02.000+0000 I listening for the commands + '[' -z '' ']' + desc 'Check GCS profiles' + set +o xtrace ----------------------------------------------------------------------------------- Check GCS profiles ----------------------------------------------------------------------------------- + compare_pbm_profile_setup some-name gcp-cs-s3 + local cluster=some-name + local profile=gcp-cs-s3 + local container=backup-agent + local pbm_binary=pbm + [[ backup-agent == \m\o\n\g\o\d ]] + kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm profile show gcp-cs-s3 ++ mktemp + local LAST_OUT=/tmp/tmp.9RUbtYOEXK ++ mktemp + local LAST_ERR=/tmp/tmp.n8HqXxWIP6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec some-name-rs0-0 -c backup-agent -- pbm profile show gcp-cs-s3 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9RUbtYOEXK + cat /tmp/tmp.n8HqXxWIP6 + rm /tmp/tmp.9RUbtYOEXK /tmp/tmp.n8HqXxWIP6 + return 0 + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/pbm_profile_gcp-cs-s3.yml /tmp/tmp.lfGJl3onxJ/pbm_profile_gcp-cs-s3.yml + compare_pbm_profile_setup some-name gcp-cs-sa + local cluster=some-name + local profile=gcp-cs-sa + local container=backup-agent + local pbm_binary=pbm + [[ backup-agent == \m\o\n\g\o\d ]] + kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm profile show gcp-cs-sa ++ mktemp + local LAST_OUT=/tmp/tmp.ptRxvlwo41 ++ mktemp + local LAST_ERR=/tmp/tmp.ZE4VAgad9O + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec some-name-rs0-0 -c backup-agent -- pbm profile show gcp-cs-sa + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ptRxvlwo41 + cat /tmp/tmp.ZE4VAgad9O + rm /tmp/tmp.ptRxvlwo41 /tmp/tmp.ZE4VAgad9O + return 0 + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/pbm_profile_gcp-cs-sa.yml /tmp/tmp.lfGJl3onxJ/pbm_profile_gcp-cs-sa.yml + backup_name_minio=backup-minio + '[' -z '' ']' + backup_name_aws=backup-aws-s3 + 
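
# wait_backup_agent tails each pod's backup-agent log until pbm-agent prints
# "listening for the commands"; the profile check then runs pbm inside that
# container and diffs the output against a golden file. A condensed sketch
# ($tmp_dir and $test_dir are stand-ins for the suite's real variables):
#
compare_pbm_profile_setup() {
    local cluster="$1" profile="$2" container="${3:-backup-agent}"
    kubectl exec "$cluster-rs0-0" -c "$container" -- \
        pbm profile show "$profile" >"$tmp_dir/pbm_profile_$profile.yml"
    diff -u "$test_dir/compare/pbm_profile_$profile.yml" \
        "$tmp_dir/pbm_profile_$profile.yml"
}
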
backup_name_gcp_s3=backup-gcp-cs-s3 + backup_name_gcp_sa=backup-gcp-cs-sa + backup_name_azure=backup-azure-blob + desc 'run backups' + set +o xtrace ----------------------------------------------------------------------------------- run backups ----------------------------------------------------------------------------------- + '[' -z '' ']' + run_backup aws-s3 + local storage=aws-s3 + local backup_name=backup-aws-s3 + local type=logical + log 'running backup backup-aws-s3' + set +o xtrace [2026-01-12T21:24:37+0000] running backup backup-aws-s3 + yq eval '.metadata.name = "backup-aws-s3" | .spec.storageName = "aws-s3" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/backup-aws-s3.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.2ZNpAytZG1 ++ mktemp + local LAST_ERR=/tmp/tmp.C9lDdJSYpo + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2ZNpAytZG1 perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created + cat /tmp/tmp.C9lDdJSYpo + rm /tmp/tmp.2ZNpAytZG1 /tmp/tmp.C9lDdJSYpo + return 0 + run_backup gcp-cs-s3 + local storage=gcp-cs-s3 + local backup_name=backup-gcp-cs-s3 + local type=logical + log 'running backup backup-gcp-cs-s3' + set +o xtrace [2026-01-12T21:24:40+0000] running backup backup-gcp-cs-s3 + yq eval '.metadata.name = "backup-gcp-cs-s3" | .spec.storageName = "gcp-cs-s3" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/backup-gcp-cs-s3.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.9vUStKV5tx ++ mktemp + local LAST_ERR=/tmp/tmp.FVJCc923Qd + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9vUStKV5tx perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs-s3 created + cat /tmp/tmp.FVJCc923Qd + rm /tmp/tmp.9vUStKV5tx /tmp/tmp.FVJCc923Qd + return 0 + run_backup gcp-cs-sa + local storage=gcp-cs-sa + local backup_name=backup-gcp-cs-sa + local type=logical + log 'running backup backup-gcp-cs-sa' + set +o xtrace [2026-01-12T21:24:42+0000] running backup backup-gcp-cs-sa + yq eval '.metadata.name = "backup-gcp-cs-sa" | .spec.storageName = "gcp-cs-sa" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/backup-gcp-cs-sa.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.eVeSuuCaFq ++ mktemp + local LAST_ERR=/tmp/tmp.SHx7DSRUWg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eVeSuuCaFq perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs-sa created + cat /tmp/tmp.SHx7DSRUWg + rm /tmp/tmp.eVeSuuCaFq /tmp/tmp.SHx7DSRUWg + return 0 + run_backup azure-blob + local storage=azure-blob + local backup_name=backup-azure-blob + local type=logical + log 'running backup backup-azure-blob' + set +o xtrace [2026-01-12T21:24:45+0000] running backup backup-azure-blob + yq eval '.metadata.name = "backup-azure-blob" | .spec.storageName = "azure-blob" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/backup-azure-blob.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.KAFx5gSqty ++ mktemp + local LAST_ERR=/tmp/tmp.6YA5kFF6d2 + 
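
# Each run_backup call is the same three-line template: patch the per-storage
# manifest with yq and apply it. Sketch of the helper as implied by the trace
# (argument handling is an assumption; the yq expression is verbatim):
#
run_backup() {
    local storage="$1"
    local backup_name="backup-$storage"
    local type="${2:-logical}"
    yq eval ".metadata.name = \"$backup_name\"
        | .spec.storageName = \"$storage\"
        | .spec.type = \"$type\"" \
        "$test_dir/conf/backup-$storage.yml" | kubectl apply -f -
}
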
local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KAFx5gSqty perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created + cat /tmp/tmp.6YA5kFF6d2 + rm /tmp/tmp.KAFx5gSqty /tmp/tmp.6YA5kFF6d2 + return 0 + run_backup minio + local storage=minio + local backup_name=backup-minio + local type=logical + log 'running backup backup-minio' + set +o xtrace [2026-01-12T21:24:47+0000] running backup backup-minio + yq eval '.metadata.name = "backup-minio" | .spec.storageName = "minio" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/backup-minio.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.ecr4DCZrGT ++ mktemp + local LAST_ERR=/tmp/tmp.YhPXmEiTcI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ecr4DCZrGT perconaservermongodbbackup.psmdb.percona.com/backup-minio created + cat /tmp/tmp.YhPXmEiTcI + rm /tmp/tmp.ecr4DCZrGT /tmp/tmp.YhPXmEiTcI + return 0 + '[' -z '' ']' + wait_backup backup-aws-s3 + local backup_name=backup-aws-s3 + local target_state=ready + set +o xtrace waiting for backup-aws-s3 to reach ready state..OK + wait_backup backup-gcp-cs-s3 + local backup_name=backup-gcp-cs-s3 + local target_state=ready + set +o xtrace waiting for backup-gcp-cs-s3 to reach ready state............OK + wait_backup backup-gcp-cs-sa + local backup_name=backup-gcp-cs-sa + local target_state=ready + set +o xtrace waiting for backup-gcp-cs-sa to reach ready state............OK + wait_backup backup-azure-blob + local backup_name=backup-azure-blob + local target_state=ready + set +o xtrace waiting for backup-azure-blob to reach ready state..OK + wait_backup backup-minio + local backup_name=backup-minio + local target_state=ready + set +o xtrace waiting for backup-minio to reach ready state..OK + sleep 5 + '[' -z '' ']' + desc 'check backup and restore -- aws-s3' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- aws-s3 ----------------------------------------------------------------------------------- ++ get_backup_dest backup-aws-s3 ++ local backup_name=backup-aws-s3 ++ kubectl_bin get psmdb-backup backup-aws-s3 -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ sed 's|azure://||' ++ sed 's|gs://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zeo57MlvIm +++ mktemp ++ local LAST_ERR=/tmp/tmp.7ij5WzV3VV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-aws-s3 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zeo57MlvIm ++ cat /tmp/tmp.7ij5WzV3VV ++ rm /tmp/tmp.zeo57MlvIm /tmp/tmp.7ij5WzV3VV ++ return 0 + backup_dest_aws=operator-testing/psmdb-demand-backup/2026-01-12T21:24:40Z + setup_aws_credentials + local secret_name=aws-s3-secret + [[ -n AKIA[REDACTED] ]] + [[ -n [REDACTED] ]] + echo 'AWS credentials already set in environment' AWS credentials already set in environment + return 0 + check_backup_existence_aws operator-testing/psmdb-demand-backup/2026-01-12T21:24:40Z /rs0/myApp.test.gz ++ echo operator-testing/psmdb-demand-backup/2026-01-12T21:24:40Z ++ cut -d/ -f1 + bucket=operator-testing ++ echo
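
# wait_backup polls the backup CR's .status.state, printing a dot per attempt,
# and get_backup_dest strips the scheme off .status.destination so one path
# format works for S3, GCS, and Azure alike. Sketches (loop body and poll
# interval are assumptions; the sed chain is verbatim from the trace):
#
wait_backup() {
    local backup_name="$1" target_state="${2:-ready}" state=""
    echo -n "waiting for $backup_name to reach $target_state state"
    until [[ "$state" == "$target_state" ]]; do
        echo -n .; sleep 1
        state=$(kubectl get psmdb-backup "$backup_name" \
            -o 'jsonpath={.status.state}')
    done
    echo OK
}

get_backup_dest() {
    kubectl get psmdb-backup "$1" -o 'jsonpath={.status.destination}' \
        | sed -e 's/.json$//' -e 's|s3://||' -e 's|azure://||' -e 's|gs://||'
}
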
operator-testing/psmdb-demand-backup/2026-01-12T21:24:40Z ++ cut -d/ -f2- + key_prefix=psmdb-demand-backup/2026-01-12T21:24:40Z + key=/rs0/myApp.test.gz + storage_name=aws-s3 + retry=0 + aws s3api head-object --bucket operator-testing --key psmdb-demand-backup/2026-01-12T21:24:40Z/rs0/myApp.test.gz + echo 'Backup operator-testing/psmdb-demand-backup/2026-01-12T21:24:40Z//rs0/myApp.test.gz found in aws-s3' Backup operator-testing/psmdb-demand-backup/2026-01-12T21:24:40Z//rs0/myApp.test.gz found in aws-s3 + run_recovery_check backup-aws-s3 some-name-rs0 + local backup=backup-aws-s3 + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XIzqxLeFVN +++ mktemp ++ local LAST_ERR=/tmp/tmp.qMS0r6nFsm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XIzqxLeFVN ++ cat /tmp/tmp.qMS0r6nFsm ++ rm /tmp/tmp.XIzqxLeFVN /tmp/tmp.qMS0r6nFsm ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.5o0b8q4wsf ++ mktemp + local LAST_ERR=/tmp/tmp.dd49LSEFYr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5o0b8q4wsf Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("d52614b6-4394-4f37-a350-687756ce19c7") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.dd49LSEFYr + rm /tmp/tmp.5o0b8q4wsf /tmp/tmp.dd49LSEFYr + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-19332 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:25:50+0000] running db.test.find() in myApp + 
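
# The existence check splits the destination into bucket and key prefix and
# asks S3 directly. The "//" in the success message is cosmetic: the message
# joins $dest and the already-slash-prefixed $key with an extra "/", while the
# actual head-object key is assembled correctly. Sketch (the retry cap is an
# assumption):
#
check_backup_existence_aws() {
    local dest="$1" key="$2" retry=0
    local bucket=${dest%%/*}     # operator-testing
    local key_prefix=${dest#*/}  # psmdb-demand-backup/<timestamp>
    until aws s3api head-object --bucket "$bucket" \
            --key "$key_prefix$key" >/dev/null 2>&1; do
        (( retry++ >= 10 )) && { echo "Backup $dest/$key not found"; return 1; }
        sleep 5
    done
    echo "Backup $dest/$key found in aws-s3"
}
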
[[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-19332 mongodb .svc.cluster.local '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oG5i3FUrbG +++ mktemp ++ local LAST_ERR=/tmp/tmp.mYRv8IYJaM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oG5i3FUrbG ++ cat /tmp/tmp.mYRv8IYJaM ++ rm /tmp/tmp.oG5i3FUrbG /tmp/tmp.mYRv8IYJaM ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.b4REJ62aAI ++ mktemp + local LAST_ERR=/tmp/tmp.i4whRtZX5j + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.b4REJ62aAI + cat /tmp/tmp.i4whRtZX5j + rm /tmp/tmp.b4REJ62aAI /tmp/tmp.i4whRtZX5j + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.lfGJl3onxJ/find-2nd + run_restore backup-aws-s3 + local backup_name=backup-aws-s3 + local restore_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/restore.yml + log 'running restore restore-backup-aws-s3' + set +o xtrace [2026-01-12T21:25:52+0000] running restore restore-backup-aws-s3 + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-aws-s3/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-aws-s3/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.1o80o0HNFS ++ mktemp + local LAST_ERR=/tmp/tmp.VANyXgdAjp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1o80o0HNFS perconaservermongodbrestore.psmdb.percona.com/restore-backup-aws-s3 created + cat /tmp/tmp.VANyXgdAjp + rm /tmp/tmp.1o80o0HNFS /tmp/tmp.VANyXgdAjp + return 0 + wait_restore backup-aws-s3 some-name + local backup_name=backup-aws-s3 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the 
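
# run_restore is a two-substitution sed over the shared restore.yml; the
# resulting PerconaServerMongoDBRestore object only names the backup to restore
# from. Sketch of the helper as implied by the trace:
#
run_restore() {
    local backup_name="$1"
    local restore_file="$test_dir/conf/restore.yml"
    sed -e "s/name:/name: restore-$backup_name/" \
        -e "s/backupName:/backupName: $backup_name/" \
        "$restore_file" | kubectl apply -f -
}
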
psmdb-restore/restore-backup-aws-s3 object to be created.OK Waiting psmdb-restore/restore-backup-aws-s3 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.18dVoHZy7T +++ mktemp ++ local LAST_ERR=/tmp/tmp.Aa4yGylxjP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.18dVoHZy7T ++ cat /tmp/tmp.Aa4yGylxjP ++ rm /tmp/tmp.18dVoHZy7T /tmp/tmp.Aa4yGylxjP ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:26:20+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PBUL6U7YDm +++ mktemp ++ local LAST_ERR=/tmp/tmp.mqLSlNmCwQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PBUL6U7YDm ++ cat /tmp/tmp.mqLSlNmCwQ ++ rm /tmp/tmp.PBUL6U7YDm /tmp/tmp.mqLSlNmCwQ ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.LJikkJp76d ++ mktemp + local LAST_ERR=/tmp/tmp.MexEFHqsdB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo 
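
# After wait_restore reports "ready", the test still waits for the psmdb
# object itself to settle before re-checking data on every pod. Sketch of
# wait_cluster_consistency (wait_time=32 comes from the trace; the loop body
# and back-off are assumptions):
#
wait_cluster_consistency() {
    local cluster_name="$1" wait_time="${2:-32}" retry=0
    sleep 7
    echo -n 'waiting for cluster readyness'
    until [[ $(kubectl get psmdb "$cluster_name" \
            -o 'jsonpath={.status.state}') == "ready" ]]; do
        (( retry++ >= wait_time )) && { echo "cluster never became ready"; return 1; }
        echo -n .; sleep 5
    done
    echo .OK
}
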
mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LJikkJp76d + cat /tmp/tmp.MexEFHqsdB + rm /tmp/tmp.LJikkJp76d /tmp/tmp.MexEFHqsdB + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:26:23+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local driver=mongodb + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NbeiB3dEe5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.IfKPcD7ets ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NbeiB3dEe5 ++ cat /tmp/tmp.IfKPcD7ets ++ rm /tmp/tmp.NbeiB3dEe5 /tmp/tmp.IfKPcD7ets ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.peRJ2AiXoR ++ mktemp + local LAST_ERR=/tmp/tmp.2PTX3GYO5T + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.peRJ2AiXoR + cat /tmp/tmp.2PTX3GYO5T + rm /tmp/tmp.peRJ2AiXoR /tmp/tmp.2PTX3GYO5T + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local postfix= + local 
suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:26:25+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 mongodb '' '' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0tU17pSdN9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.BxosbEp6NU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0tU17pSdN9 ++ cat /tmp/tmp.BxosbEp6NU ++ rm /tmp/tmp.0tU17pSdN9 /tmp/tmp.BxosbEp6NU ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.m2b3Fk7DNB ++ mktemp + local LAST_ERR=/tmp/tmp.fg0GE91QFu + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.m2b3Fk7DNB + cat /tmp/tmp.fg0GE91QFu + rm /tmp/tmp.m2b3Fk7DNB /tmp/tmp.fg0GE91QFu + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + desc 'check backup and restore -- gcp-cs-s3' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- gcp-cs-s3 ----------------------------------------------------------------------------------- ++ get_backup_dest backup-gcp-cs-s3 ++ local backup_name=backup-gcp-cs-s3 ++ kubectl_bin get psmdb-backup backup-gcp-cs-s3 -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' +++ mktemp ++ sed 's|azure://||' ++ sed 's|gs://||' ++ local LAST_OUT=/tmp/tmp.Fc7x9k2FAg +++ mktemp ++ local LAST_ERR=/tmp/tmp.LP6sv5SLkw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-gcp-cs-s3 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Fc7x9k2FAg ++ cat /tmp/tmp.LP6sv5SLkw ++ rm /tmp/tmp.Fc7x9k2FAg 
/tmp/tmp.LP6sv5SLkw ++ return 0 + backup_dest_gcp_s3=operator-testing/psmdb-demand-backup/2026-01-12T21:25:03Z + setup_gcs_credentials + local secret_name=gcp-cs-secret + gsutil ls + echo 'Setting up GCS credentials from K8s secret: gcp-cs-secret' Setting up GCS credentials from K8s secret: gcp-cs-secret + local trace_was_on=0 + [[ ehxB == *x* ]] + trace_was_on=1 + set +x + echo 'GCS credentials configured successfully' GCS credentials configured successfully + check_backup_existence_gcs operator-testing/psmdb-demand-backup/2026-01-12T21:25:03Z /rs0/myApp.test.gz + backup_dest_gcp=operator-testing/psmdb-demand-backup/2026-01-12T21:25:03Z + obj=/rs0/myApp.test.gz + storage_name=gcp-cs + retry=0 + gcs_path=gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:03Z/rs0/myApp.test.gz + gsutil ls gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:03Z/rs0/myApp.test.gz + echo 'Backup gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:03Z/rs0/myApp.test.gz found in gcp-cs' Backup gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:03Z/rs0/myApp.test.gz found in gcp-cs + run_recovery_check backup-gcp-cs-s3 some-name-rs0 + local backup=backup-gcp-cs-s3 + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nbtWzKwq3p +++ mktemp ++ local LAST_ERR=/tmp/tmp.iKxVTGDxXi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nbtWzKwq3p ++ cat /tmp/tmp.iKxVTGDxXi ++ rm /tmp/tmp.nbtWzKwq3p /tmp/tmp.iKxVTGDxXi ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.bYhQRznAi0 ++ mktemp + local LAST_ERR=/tmp/tmp.ucwnLPzLTm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bYhQRznAi0 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("ad68f5ff-aab5-4fc8-ad6a-c5782a105e22") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 
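
# The GCS checks mirror the S3 one but go through gsutil after the
# service-account key from gcp-cs-secret has been activated. Sketch (retry cap
# assumed; path assembly matches the trace -- $obj carries its leading slash):
#
check_backup_existence_gcs() {
    local dest="$1" obj="$2" retry=0
    local gcs_path="gs://${dest}${obj}"
    until gsutil ls "$gcs_path" >/dev/null 2>&1; do
        (( retry++ >= 10 )) && { echo "Backup $gcs_path not found"; return 1; }
        sleep 5
    done
    echo "Backup $gcs_path found in gcp-cs"
}
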
1 }) bye + cat /tmp/tmp.ucwnLPzLTm + rm /tmp/tmp.bYhQRznAi0 /tmp/tmp.ucwnLPzLTm + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-19332 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:26:37+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-19332 mongodb .svc.cluster.local '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SBlV6n6xxl +++ mktemp ++ local LAST_ERR=/tmp/tmp.HMeTT6vO8J ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SBlV6n6xxl ++ cat /tmp/tmp.HMeTT6vO8J ++ rm /tmp/tmp.SBlV6n6xxl /tmp/tmp.HMeTT6vO8J ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.2wNv0tvPjI ++ mktemp + local LAST_ERR=/tmp/tmp.K1KLiOXZ2K + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2wNv0tvPjI + cat /tmp/tmp.K1KLiOXZ2K + rm /tmp/tmp.2wNv0tvPjI /tmp/tmp.K1KLiOXZ2K + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.lfGJl3onxJ/find-2nd + run_restore backup-gcp-cs-s3 + local backup_name=backup-gcp-cs-s3 + local restore_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/restore.yml + log 'running restore restore-backup-gcp-cs-s3' + set +o xtrace [2026-01-12T21:26:39+0000] running restore restore-backup-gcp-cs-s3 + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-gcp-cs-s3/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-gcp-cs-s3/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.Els4Qc2e3V ++ mktemp + local 
LAST_ERR=/tmp/tmp.pMTF6uN6Al + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Els4Qc2e3V perconaservermongodbrestore.psmdb.percona.com/restore-backup-gcp-cs-s3 created + cat /tmp/tmp.pMTF6uN6Al + rm /tmp/tmp.Els4Qc2e3V /tmp/tmp.pMTF6uN6Al + return 0 + wait_restore backup-gcp-cs-s3 some-name + local backup_name=backup-gcp-cs-s3 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-gcp-cs-s3 object to be created.OK Waiting psmdb-restore/restore-backup-gcp-cs-s3 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xRoWOLNG7y +++ mktemp ++ local LAST_ERR=/tmp/tmp.VHbQmPDACc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xRoWOLNG7y ++ cat /tmp/tmp.VHbQmPDACc ++ rm /tmp/tmp.xRoWOLNG7y /tmp/tmp.VHbQmPDACc ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:27:05+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7qymUmqtCf +++ mktemp ++ local LAST_ERR=/tmp/tmp.MOcRom9StA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7qymUmqtCf ++ cat /tmp/tmp.MOcRom9StA ++ rm /tmp/tmp.7qymUmqtCf /tmp/tmp.MOcRom9StA ++ return 0 + local 
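
# simple_data_check fans the same golden-file comparison out to every member
# of the replica set, so each restore is verified on all three pods rather
# than just the primary. Sketch ($namespace stands in for the test namespace,
# here demand-backup-19332; the sharded branch is omitted):
#
simple_data_check() {
    local cluster_name="$1" replicas="$2" is_sharded="${3:-0}"
    local last_pod=$((replicas - 1))
    for i in $(seq 0 $last_pod); do
        compare_mongo_cmd find \
            "myApp:myPass@$cluster_name-$i.$cluster_name.$namespace"
    done
}
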
client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Hb3h6oqTb1 ++ mktemp + local LAST_ERR=/tmp/tmp.DPUFqHh4Bl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Hb3h6oqTb1 + cat /tmp/tmp.DPUFqHh4Bl + rm /tmp/tmp.Hb3h6oqTb1 /tmp/tmp.DPUFqHh4Bl + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:27:08+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OCplOymVNv +++ mktemp ++ local LAST_ERR=/tmp/tmp.sTfpQmS9Wk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.OCplOymVNv ++ cat /tmp/tmp.sTfpQmS9Wk ++ rm /tmp/tmp.OCplOymVNv /tmp/tmp.sTfpQmS9Wk ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.huM66drFa7 ++ mktemp + local LAST_ERR=/tmp/tmp.UCP2M4e7Br + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.huM66drFa7 + cat /tmp/tmp.UCP2M4e7Br + rm /tmp/tmp.huM66drFa7 /tmp/tmp.UCP2M4e7Br + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:27:10+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bdoCYzvi0S +++ mktemp ++ local LAST_ERR=/tmp/tmp.KfUMK8EqLN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bdoCYzvi0S ++ cat /tmp/tmp.KfUMK8EqLN ++ rm /tmp/tmp.bdoCYzvi0S /tmp/tmp.KfUMK8EqLN ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.gq0wJ3WnGG ++ mktemp + local LAST_ERR=/tmp/tmp.R3r0rFsSsm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gq0wJ3WnGG + cat /tmp/tmp.R3r0rFsSsm + rm /tmp/tmp.gq0wJ3WnGG /tmp/tmp.R3r0rFsSsm + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + desc 'check backup and restore -- gcp-cs-sa' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- gcp-cs-sa 
----------------------------------------------------------------------------------- ++ get_backup_dest backup-gcp-cs-sa ++ local backup_name=backup-gcp-cs-sa ++ kubectl_bin get psmdb-backup backup-gcp-cs-sa -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ sed 's|azure://||' ++ sed 's|gs://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HOdkxhVD7O +++ mktemp ++ local LAST_ERR=/tmp/tmp.aR4TYBT7gc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-gcp-cs-sa -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HOdkxhVD7O ++ cat /tmp/tmp.aR4TYBT7gc ++ rm /tmp/tmp.HOdkxhVD7O /tmp/tmp.aR4TYBT7gc ++ return 0 + backup_dest_gcp_sa=operator-testing/psmdb-demand-backup/2026-01-12T21:25:25Z + check_backup_existence_gcs operator-testing/psmdb-demand-backup/2026-01-12T21:25:25Z /rs0/myApp.test.gz + backup_dest_gcp=operator-testing/psmdb-demand-backup/2026-01-12T21:25:25Z + obj=/rs0/myApp.test.gz + storage_name=gcp-cs + retry=0 + gcs_path=gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:25Z/rs0/myApp.test.gz + gsutil ls gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:25Z/rs0/myApp.test.gz + echo 'Backup gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:25Z/rs0/myApp.test.gz found in gcp-cs' Backup gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:25Z/rs0/myApp.test.gz found in gcp-cs + run_recovery_check backup-gcp-cs-sa some-name-rs0 + local backup=backup-gcp-cs-sa + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eYKJwpQW77 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rfwXknSTe7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eYKJwpQW77 ++ cat /tmp/tmp.rfwXknSTe7 ++ rm /tmp/tmp.eYKJwpQW77 /tmp/tmp.rfwXknSTe7 ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.htgeehz9Jt ++ mktemp + local LAST_ERR=/tmp/tmp.2BC9obdY2w + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.htgeehz9Jt Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("5ae5a308-4e04-4da0-b525-862d2e1a6fc9") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.2BC9obdY2w + rm /tmp/tmp.htgeehz9Jt /tmp/tmp.2BC9obdY2w + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-19332 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:27:18+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-19332 mongodb .svc.cluster.local '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EajZgHMo9g +++ mktemp ++ local LAST_ERR=/tmp/tmp.9kaxngqK2X ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EajZgHMo9g ++ cat /tmp/tmp.9kaxngqK2X ++ rm /tmp/tmp.EajZgHMo9g /tmp/tmp.9kaxngqK2X ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.TAkcyjRO53 ++ mktemp + local LAST_ERR=/tmp/tmp.bLVH8A77XV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TAkcyjRO53 + cat /tmp/tmp.bLVH8A77XV + rm /tmp/tmp.TAkcyjRO53 /tmp/tmp.bLVH8A77XV + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.lfGJl3onxJ/find-2nd + run_restore backup-gcp-cs-sa + local backup_name=backup-gcp-cs-sa + local 
restore_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/restore.yml + log 'running restore restore-backup-gcp-cs-sa' + set +o xtrace [2026-01-12T21:27:20+0000] running restore restore-backup-gcp-cs-sa + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-gcp-cs-sa/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-gcp-cs-sa/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.dMJFSZRd2x ++ mktemp + local LAST_ERR=/tmp/tmp.xxUqNYwN4x + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dMJFSZRd2x perconaservermongodbrestore.psmdb.percona.com/restore-backup-gcp-cs-sa created + cat /tmp/tmp.xxUqNYwN4x + rm /tmp/tmp.dMJFSZRd2x /tmp/tmp.xxUqNYwN4x + return 0 + wait_restore backup-gcp-cs-sa some-name + local backup_name=backup-gcp-cs-sa + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-gcp-cs-sa object to be created.OK Waiting psmdb-restore/restore-backup-gcp-cs-sa to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8PzqK5gHAj +++ mktemp ++ local LAST_ERR=/tmp/tmp.9VDJb0jIe3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8PzqK5gHAj ++ cat /tmp/tmp.9VDJb0jIe3 ++ rm /tmp/tmp.8PzqK5gHAj /tmp/tmp.9VDJb0jIe3 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:27:46+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 mongodb '' '' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin 
get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UNsatnxNsN +++ mktemp ++ local LAST_ERR=/tmp/tmp.6YVm2mzGkF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UNsatnxNsN ++ cat /tmp/tmp.6YVm2mzGkF ++ rm /tmp/tmp.UNsatnxNsN /tmp/tmp.6YVm2mzGkF ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Bc67pFcJ87 ++ mktemp + local LAST_ERR=/tmp/tmp.qt4q8EJCiM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Bc67pFcJ87 + cat /tmp/tmp.qt4q8EJCiM + rm /tmp/tmp.Bc67pFcJ87 /tmp/tmp.qt4q8EJCiM + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:27:49+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 mongodb '' '' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.80uIhe0dla +++ mktemp ++ local LAST_ERR=/tmp/tmp.fVjGnAR55i ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.80uIhe0dla ++ cat /tmp/tmp.fVjGnAR55i ++ rm /tmp/tmp.80uIhe0dla /tmp/tmp.fVjGnAR55i ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n 
db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.VfgdnJgRjZ ++ mktemp + local LAST_ERR=/tmp/tmp.LWeOcf7QwG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VfgdnJgRjZ + cat /tmp/tmp.LWeOcf7QwG + rm /tmp/tmp.VfgdnJgRjZ /tmp/tmp.LWeOcf7QwG + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:27:51+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.63uCwktfWL +++ mktemp ++ local LAST_ERR=/tmp/tmp.gPMRVdUzHe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.63uCwktfWL ++ cat /tmp/tmp.gPMRVdUzHe ++ rm /tmp/tmp.63uCwktfWL /tmp/tmp.gPMRVdUzHe ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.DniKH6B4gB ++ mktemp + local LAST_ERR=/tmp/tmp.NXdfuyCMvq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DniKH6B4gB + cat /tmp/tmp.NXdfuyCMvq + rm 
/tmp/tmp.DniKH6B4gB /tmp/tmp.NXdfuyCMvq + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + desc 'check backup and restore -- azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- azure-blob ----------------------------------------------------------------------------------- ++ get_backup_dest backup-azure-blob ++ local backup_name=backup-azure-blob ++ kubectl_bin get psmdb-backup backup-azure-blob -o 'jsonpath={.status.destination}' ++ sed 's|s3://||' ++ sed -e 's/.json$//' ++ sed 's|gs://||' ++ sed 's|azure://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2nlVodahLx +++ mktemp ++ local LAST_ERR=/tmp/tmp.aAq9K2TTFj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-azure-blob -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2nlVodahLx ++ cat /tmp/tmp.aAq9K2TTFj ++ rm /tmp/tmp.2nlVodahLx /tmp/tmp.aAq9K2TTFj ++ return 0 + backup_dest_azure=https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2026-01-12T21:24:52Z + setup_azure_credentials + local secret_name=azure-secret + echo 'Setting up Azure credentials from K8s secret: azure-secret' Setting up Azure credentials from K8s secret: azure-secret + local trace_was_on=0 + [[ ehxB == *x* ]] + trace_was_on=1 + set +x + echo 'Azure credentials configured successfully' Azure credentials configured successfully + check_backup_existence_azure https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2026-01-12T21:24:52Z /rs0/myApp.test.gz ++ echo https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2026-01-12T21:24:52Z ++ sed 's|https://[^/]*\.blob\.core\.windows\.net/||' + url_path=operator-testing/psmdb-demand-backup/2026-01-12T21:24:52Z ++ echo operator-testing/psmdb-demand-backup/2026-01-12T21:24:52Z ++ cut -d/ -f1 + container=operator-testing ++ echo operator-testing/psmdb-demand-backup/2026-01-12T21:24:52Z ++ cut -d/ -f2- + blob_prefix=psmdb-demand-backup/2026-01-12T21:24:52Z + blob=/rs0/myApp.test.gz + storage_name=azure-blob + retry=0 + blob_path=psmdb-demand-backup/2026-01-12T21:24:52Z/rs0/myApp.test.gz + az storage blob show --container-name operator-testing --name psmdb-demand-backup/2026-01-12T21:24:52Z/rs0/myApp.test.gz + echo 'Backup psmdb-demand-backup/2026-01-12T21:24:52Z/rs0/myApp.test.gz found in azure-blob' Backup psmdb-demand-backup/2026-01-12T21:24:52Z/rs0/myApp.test.gz found in azure-blob + run_recovery_check backup-azure-blob some-name-rs0 + local backup=backup-azure-blob + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8sGVtQbNH8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.pv8JRP5UeK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8sGVtQbNH8 ++ cat /tmp/tmp.pv8JRP5UeK ++ rm /tmp/tmp.8sGVtQbNH8 /tmp/tmp.pv8JRP5UeK ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.AV64nUOSCS ++ mktemp + local LAST_ERR=/tmp/tmp.SDsA5DAwnT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AV64nUOSCS Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("155f6bd7-992b-405c-932b-cd58f394a76d") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.SDsA5DAwnT + rm /tmp/tmp.AV64nUOSCS /tmp/tmp.SDsA5DAwnT + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-19332 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:28:00+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-19332 mongodb .svc.cluster.local '' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XVNKslgo3Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.IrGDpUwpkg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XVNKslgo3Z ++ cat /tmp/tmp.IrGDpUwpkg ++ rm /tmp/tmp.XVNKslgo3Z /tmp/tmp.IrGDpUwpkg ++ return 0 + local 
client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.h2cwKq8bMW ++ mktemp + local LAST_ERR=/tmp/tmp.krJgeeqrnl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.h2cwKq8bMW + cat /tmp/tmp.krJgeeqrnl + rm /tmp/tmp.h2cwKq8bMW /tmp/tmp.krJgeeqrnl + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.lfGJl3onxJ/find-2nd + run_restore backup-azure-blob + local backup_name=backup-azure-blob + local restore_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/restore.yml + log 'running restore restore-backup-azure-blob' + set +o xtrace [2026-01-12T21:28:03+0000] running restore restore-backup-azure-blob + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-azure-blob/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-azure-blob/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.0VlYCIPGqN ++ mktemp + local LAST_ERR=/tmp/tmp.RFTShMM6ee + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0VlYCIPGqN perconaservermongodbrestore.psmdb.percona.com/restore-backup-azure-blob created + cat /tmp/tmp.RFTShMM6ee + rm /tmp/tmp.0VlYCIPGqN /tmp/tmp.RFTShMM6ee + return 0 + wait_restore backup-azure-blob some-name + local backup_name=backup-azure-blob + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-azure-blob object to be created.OK Waiting psmdb-restore/restore-backup-azure-blob to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.smowo3ehZb +++ mktemp ++ local LAST_ERR=/tmp/tmp.bf90qhDTuS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.smowo3ehZb ++ cat /tmp/tmp.bf90qhDTuS ++ rm /tmp/tmp.smowo3ehZb /tmp/tmp.bf90qhDTuS ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local 
database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:28:31+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DuTGQzWLn9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VB3H7kKRrz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DuTGQzWLn9 ++ cat /tmp/tmp.VB3H7kKRrz ++ rm /tmp/tmp.DuTGQzWLn9 /tmp/tmp.VB3H7kKRrz ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.G8Og3234hF ++ mktemp + local LAST_ERR=/tmp/tmp.AL7rV6iPLl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.G8Og3234hF + cat /tmp/tmp.AL7rV6iPLl + rm /tmp/tmp.G8Og3234hF /tmp/tmp.AL7rV6iPLl + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:28:33+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ 
myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pt9P36kuPl +++ mktemp ++ local LAST_ERR=/tmp/tmp.fr9IOACvFt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pt9P36kuPl ++ cat /tmp/tmp.fr9IOACvFt ++ rm /tmp/tmp.pt9P36kuPl /tmp/tmp.fr9IOACvFt ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.rlOquTerkV ++ mktemp + local LAST_ERR=/tmp/tmp.bBcNkV23Xr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rlOquTerkV + cat /tmp/tmp.bBcNkV23Xr + rm /tmp/tmp.rlOquTerkV /tmp/tmp.bBcNkV23Xr + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:28:36+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xuNvdEN5XU +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q94gtzrR4W ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e 
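
Nearly every kubectl call in this trace runs through the suite's kubectl_bin retry helper, whose expansion (the mktemp buffers, seq 0 2, set +e, the '[' 0 '!=' 0 -a -n 1 ']' test, break, cat, rm, return 0) repeats on almost every line above and below. A minimal sketch of that wrapper, reconstructed from the trace; the sleep between failed attempts is an assumption, since every attempt in this log happens to succeed on the first try:

kubectl_bin() {
	local LAST_OUT LAST_ERR exit_status=0 timeout=4
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	for i in $(seq 0 2); do
		set +e
		kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR" # buffer one attempt
		exit_status=$?
		set -e
		if [ "$exit_status" -eq 0 ]; then
			break # stop retrying once an attempt succeeds
		fi
		sleep "$timeout" # assumed backoff before the next attempt
	done
	cat "$LAST_OUT"     # replay the captured stdout
	cat "$LAST_ERR" >&2 # replay the captured stderr
	rm "$LAST_OUT" "$LAST_ERR"
	return "$exit_status"
}

Reading the surrounding set -x output with this definition in mind collapses most of the noise: each LAST_OUT/LAST_ERR pair in the trace is one buffered kubectl attempt.
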
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xuNvdEN5XU ++ cat /tmp/tmp.Q94gtzrR4W ++ rm /tmp/tmp.xuNvdEN5XU /tmp/tmp.Q94gtzrR4W ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.oobtkv0spG ++ mktemp + local LAST_ERR=/tmp/tmp.JYNsA4StsY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oobtkv0spG + cat /tmp/tmp.JYNsA4StsY + rm /tmp/tmp.oobtkv0spG /tmp/tmp.JYNsA4StsY + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + desc 'check backup and restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- minio ----------------------------------------------------------------------------------- + check_backup_in_storage backup-minio minio rs0 myApp.test.gz + local backup=backup-minio + local storage_type=minio + local replset=rs0 + local file=myApp.test.gz + local endpoint ++ get_backup_dest backup-minio ++ local backup_name=backup-minio ++ /usr/sbin/sed 's|https://engk8soperators.blob.core.windows.net/||' ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' +++ mktemp ++ sed 's|azure://||' ++ sed 's|gs://||' ++ local LAST_OUT=/tmp/tmp.bFARcJyYTf +++ mktemp ++ local LAST_ERR=/tmp/tmp.XD9o8apBBq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bFARcJyYTf ++ cat /tmp/tmp.XD9o8apBBq ++ rm /tmp/tmp.bFARcJyYTf /tmp/tmp.XD9o8apBBq ++ return 0 + backup_dest=operator-testing/2026-01-12T21:25:15Z + case ${storage_type} in + endpoint=minio-service + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2026-01-12T21:25:15Z/rs0/myApp.test.gz + grep myApp.test.gz ++ mktemp + local LAST_OUT=/tmp/tmp.9Ec2qRZAc8 ++ mktemp + local LAST_ERR=/tmp/tmp.M3v1pLQULP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2026-01-12T21:25:15Z/rs0/myApp.test.gz + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9Ec2qRZAc8 + cat /tmp/tmp.M3v1pLQULP + rm /tmp/tmp.9Ec2qRZAc8 /tmp/tmp.M3v1pLQULP + return 0 
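
The minio existence check that just returned 0 is self-contained and can be replayed by hand: a one-shot awscli pod points at the in-cluster MinIO endpoint and lists the backup object, and the 55-byte myApp.test.gz entry on the next line is its output. The command below is lifted straight from the trace; the access keys and the minio-service:9000 endpoint are the test fixtures used here, not real credentials:

kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- \
	/usr/bin/env \
		AWS_ACCESS_KEY_ID=some-access-key \
		AWS_SECRET_ACCESS_KEY=some-secret-key \
		AWS_DEFAULT_REGION=us-east-1 \
	/usr/bin/aws --endpoint-url http://minio-service:9000 \
		s3 ls s3://operator-testing/2026-01-12T21:25:15Z/rs0/myApp.test.gz |
	grep myApp.test.gz

An empty listing (grep exiting 1) would mean the backup object never reached the bucket, which is exactly the failure mode this step guards against.
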
2026-01-12 21:25:19 55 myApp.test.gz + run_recovery_check backup-minio some-name-rs0 + local backup=backup-minio + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jU1kaAguF6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.MXdDmtzGo5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.jU1kaAguF6 ++ cat /tmp/tmp.MXdDmtzGo5 ++ rm /tmp/tmp.jU1kaAguF6 /tmp/tmp.MXdDmtzGo5 ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.3CrgxUgdMM ++ mktemp + local LAST_ERR=/tmp/tmp.y1DIP8gSIG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3CrgxUgdMM Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("3c4938d3-a02a-489f-9bbb-0ba33700d216") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.y1DIP8gSIG + rm /tmp/tmp.3CrgxUgdMM /tmp/tmp.y1DIP8gSIG + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-19332 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:28:46+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-19332 mongodb .svc.cluster.local '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for 
set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BbShoIkpFu +++ mktemp ++ local LAST_ERR=/tmp/tmp.acd45nzGCL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BbShoIkpFu ++ cat /tmp/tmp.acd45nzGCL ++ rm /tmp/tmp.BbShoIkpFu /tmp/tmp.acd45nzGCL ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.wdNhLBmXTQ ++ mktemp + local LAST_ERR=/tmp/tmp.ufszw4EDxw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wdNhLBmXTQ + cat /tmp/tmp.ufszw4EDxw + rm /tmp/tmp.wdNhLBmXTQ /tmp/tmp.ufszw4EDxw + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.lfGJl3onxJ/find-2nd + run_restore backup-minio + local backup_name=backup-minio + local restore_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/restore.yml + log 'running restore restore-backup-minio' + set +o xtrace [2026-01-12T21:28:48+0000] running restore restore-backup-minio + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-minio/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.0PSvu8yhmL ++ mktemp + local LAST_ERR=/tmp/tmp.HQ9KtfJOLh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0PSvu8yhmL perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio created + cat /tmp/tmp.HQ9KtfJOLh + rm /tmp/tmp.0PSvu8yhmL /tmp/tmp.HQ9KtfJOLh + return 0 + wait_restore backup-minio some-name + local backup_name=backup-minio + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio object to be created.OK Waiting psmdb-restore/restore-backup-minio to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.54L3IwT74Q +++ mktemp ++ local 
LAST_ERR=/tmp/tmp.S3WFTpbwiS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.54L3IwT74Q ++ cat /tmp/tmp.S3WFTpbwiS ++ rm /tmp/tmp.54L3IwT74Q /tmp/tmp.S3WFTpbwiS ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:29:09+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 mongodb '' '' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local driver=mongodb + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eIro1TYVc0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.1pVV2Sqs0S ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eIro1TYVc0 ++ cat /tmp/tmp.1pVV2Sqs0S ++ rm /tmp/tmp.eIro1TYVc0 /tmp/tmp.1pVV2Sqs0S ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.PBhb1mIg7P ++ mktemp + local LAST_ERR=/tmp/tmp.TljZdGPcvG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PBhb1mIg7P + cat /tmp/tmp.TljZdGPcvG + rm /tmp/tmp.PBhb1mIg7P /tmp/tmp.TljZdGPcvG + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local 
command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:29:11+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kLcbafo8Gs +++ mktemp ++ local LAST_ERR=/tmp/tmp.HdKEEKtgJG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kLcbafo8Gs ++ cat /tmp/tmp.HdKEEKtgJG ++ rm /tmp/tmp.kLcbafo8Gs /tmp/tmp.HdKEEKtgJG ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.8N7ievWflX ++ mktemp + local LAST_ERR=/tmp/tmp.VD1ZmSRGbh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8N7ievWflX + cat /tmp/tmp.VD1ZmSRGbh + rm /tmp/tmp.8N7ievWflX /tmp/tmp.VD1ZmSRGbh + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:29:13+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona 
Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.thbwKvlReZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.x8W2meLTUr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.thbwKvlReZ ++ cat /tmp/tmp.x8W2meLTUr ++ rm /tmp/tmp.thbwKvlReZ /tmp/tmp.x8W2meLTUr ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.eX7p0xZPpN ++ mktemp + local LAST_ERR=/tmp/tmp.u9H0lRpkjh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eX7p0xZPpN + cat /tmp/tmp.u9H0lRpkjh + rm /tmp/tmp.eX7p0xZPpN /tmp/tmp.u9H0lRpkjh + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + run_mongo 'use myApp\n db.dropUser("test1user")' userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.dropUser("test1user")' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Aqtx8KP5do +++ mktemp ++ local LAST_ERR=/tmp/tmp.6rDKORiVOj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Aqtx8KP5do ++ cat /tmp/tmp.6rDKORiVOj ++ rm /tmp/tmp.Aqtx8KP5do /tmp/tmp.6rDKORiVOj ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.dropUser("test1user")\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Di5MQN0vcY ++ mktemp + local LAST_ERR=/tmp/tmp.N1w8Ca8G8z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- 
bash -c 'printf '\''use myApp\n db.dropUser("test1user")\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Di5MQN0vcY Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("e1ef0fa7-1d7e-4965-96aa-1edfc9b7e7ee") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.N1w8Ca8G8z + rm /tmp/tmp.Di5MQN0vcY /tmp/tmp.N1w8Ca8G8z + return 0 + run_mongo 'use myApp\n db.dropRole("test1role")' userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.dropRole("test1role")' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JpXtW4p2ir +++ mktemp ++ local LAST_ERR=/tmp/tmp.MPaBhFB3ul ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JpXtW4p2ir ++ cat /tmp/tmp.MPaBhFB3ul ++ rm /tmp/tmp.JpXtW4p2ir /tmp/tmp.MPaBhFB3ul ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.dropRole("test1role")\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.A6dcEAkoWf ++ mktemp + local LAST_ERR=/tmp/tmp.NTzdoksVwq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.dropRole("test1role")\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.A6dcEAkoWf Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("30da2890-37b7-4f58-939c-97fe149ab9f3") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.NTzdoksVwq + rm /tmp/tmp.A6dcEAkoWf /tmp/tmp.NTzdoksVwq + return 0 + desc 'selective restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- selective restore 
-- minio ----------------------------------------------------------------------------------- + run_recovery_check_selective backup-minio some-name-rs0 myApp test2 false test1user test1role + local backup=backup-minio + local cluster=some-name-rs0 + local database=myApp + local collection=test2 + local with_users_and_roles=false + local username=test1user + local role=test1role + restore_name=restore-backup-minio-selective + run_mongo 'use myApp\n db.test2.drop()' myApp:myPass@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.test2.drop()' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.T1rv6s2JJY +++ mktemp ++ local LAST_ERR=/tmp/tmp.RieG8sItcH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.T1rv6s2JJY ++ cat /tmp/tmp.RieG8sItcH ++ rm /tmp/tmp.T1rv6s2JJY /tmp/tmp.RieG8sItcH ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.nYBqYCp4KY ++ mktemp + local LAST_ERR=/tmp/tmp.9Xsvyx0evv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nYBqYCp4KY Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("64599807-c6a2-4b8d-a167-631922d4d559") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.9Xsvyx0evv + rm /tmp/tmp.nYBqYCp4KY /tmp/tmp.9Xsvyx0evv + return 0 ++ collection_exists test2 ++ local collection=test2 ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-19332 '' '' --quiet ++ grep -v 'switched to' ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-19332 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ jq 'index("test2") != null' ++ local replica_set=rs0 ++ [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.vyWadcvdO4 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.OsjFavnKK3 +++ local exit_status=0 +++ local 
timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.vyWadcvdO4 +++ cat /tmp/tmp.OsjFavnKK3 +++ rm /tmp/tmp.vyWadcvdO4 /tmp/tmp.OsjFavnKK3 +++ return 0 ++ local client_container=psmdb-client-696897d69b-5k2gv ++ kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fSMPHMks9c +++ mktemp ++ local LAST_ERR=/tmp/tmp.kuzTfclwNF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fSMPHMks9c ++ cat /tmp/tmp.kuzTfclwNF ++ rm /tmp/tmp.fSMPHMks9c /tmp/tmp.kuzTfclwNF ++ return 0 + [[ false == \t\r\u\e ]] + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/restore.yml + yq '.metadata.name="restore-backup-minio-selective"' + yq '.spec.backupName="backup-minio"' + yq '.spec.selective.namespaces[0]="myApp.test"' + yq .spec.selective.withUsersAndRoles=false + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.jIzRcyN79P ++ mktemp + local LAST_ERR=/tmp/tmp.KjuNJutmUH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jIzRcyN79P perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-selective created + cat /tmp/tmp.KjuNJutmUH + rm /tmp/tmp.jIzRcyN79P /tmp/tmp.KjuNJutmUH + return 0 + wait_restore backup-minio-selective some-name + local backup_name=backup-minio-selective + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-selective object to be created.OK Waiting psmdb-restore/restore-backup-minio-selective to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eL7BwM7TGs +++ mktemp ++ local LAST_ERR=/tmp/tmp.CQjwktlAPc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eL7BwM7TGs ++ cat /tmp/tmp.CQjwktlAPc ++ rm /tmp/tmp.eL7BwM7TGs /tmp/tmp.CQjwktlAPc ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK ++ collection_exists test2 ++ local collection=test2 ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-19332 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-19332 ++ local driver=mongodb+srv ++ local 
+ wait_restore backup-minio-selective some-name + local backup_name=backup-minio-selective + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-selective object to be created.OK Waiting psmdb-restore/restore-backup-minio-selective to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eL7BwM7TGs +++ mktemp ++ local LAST_ERR=/tmp/tmp.CQjwktlAPc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eL7BwM7TGs ++ cat /tmp/tmp.CQjwktlAPc ++ rm /tmp/tmp.eL7BwM7TGs /tmp/tmp.CQjwktlAPc ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK ++ collection_exists test2 ++ local collection=test2 ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-19332 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-19332 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local replica_set=rs0 ++ [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ grep -v 'switched to' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ jq 'index("test2") != null' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.UKJgUes7eP ++++ mktemp +++ local LAST_ERR=/tmp/tmp.OFKQkK2pF1 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.UKJgUes7eP +++ cat /tmp/tmp.OFKQkK2pF1 +++ rm /tmp/tmp.UKJgUes7eP /tmp/tmp.OFKQkK2pF1 +++ return 0 ++ local client_container=psmdb-client-696897d69b-5k2gv ++ kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.q6wUmGgNRQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.JUiuFZfGFj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.q6wUmGgNRQ ++ cat /tmp/tmp.JUiuFZfGFj ++ rm /tmp/tmp.q6wUmGgNRQ /tmp/tmp.JUiuFZfGFj ++ return 0 + [[ false == \t\r\u\e ]] + [[ false == \t\r\u\e ]] + [[ false == \f\a\l\s\e ]] ++ user_exists test1user ++ local username=test1user ++ run_mongo 'use myApp\n JSON.stringify(db.getUsers())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getUsers())' ++ grep -v 'switched to' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local replica_set=rs0 ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ jq 'any(.[]; ._id==myApp.test1user)' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.OTcjVglWh6 jq: error: myApp/0 is not defined at <top-level>, line 1: any(.[]; ._id==myApp.test1user) jq: 1 compile error
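Note on the jq compile error above: user_exists (and role_exists below) pipe JSON.stringify(db.getUsers()) / db.getRoles() output into jq with the filter any(.[]; ._id==myApp.test1user), where the right-hand side of == is unquoted. jq parses myApp as a zero-argument function, hence "myApp/0 is not defined"; the filter never compiles, jq exits without reading its input (which is also why "grep: write error: Broken pipe" shows up further down), and the captured result is an empty string rather than true/false. A minimal corrected sketch; the only change is quoting the string literal, and the sample input document is hypothetical:

printf '[{"_id":"myApp.test1user","user":"test1user","db":"myApp"}]' \
    | jq 'any(.[]; ._id=="myApp.test1user")'
# prints: true

The same unquoted comparison recurs for role_exists below and again in the second selective-restore run.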
++++ mktemp +++ local LAST_ERR=/tmp/tmp.bd4zrhlqZ3 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.OTcjVglWh6 +++ cat /tmp/tmp.bd4zrhlqZ3 +++ rm /tmp/tmp.OTcjVglWh6 /tmp/tmp.bd4zrhlqZ3 +++ return 0 ++ local client_container=psmdb-client-696897d69b-5k2gv ++ kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ckY4H3xlCS +++ mktemp ++ local LAST_ERR=/tmp/tmp.zPcfBNOCAP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ckY4H3xlCS ++ cat /tmp/tmp.zPcfBNOCAP ++ rm /tmp/tmp.ckY4H3xlCS /tmp/tmp.zPcfBNOCAP ++ return 0 grep: write error: Broken pipe + [[ '' == \t\r\u\e ]] ++ role_exists test1role ++ local role=test1role ++ run_mongo 'use myApp\n JSON.stringify(db.getRoles())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getRoles())' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local replica_set=rs0 ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 == *cfg* ]] +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ grep -v 'switched to' ++ jq 'any(.[]; ._id==myApp.test1role)' ++++ mktemp jq: error: myApp/0 is not defined at <top-level>, line 1: any(.[]; ._id==myApp.test1role) jq: 1 compile error +++ local LAST_OUT=/tmp/tmp.F4Rp0YMNh9 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.UgSkutGZgp +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.F4Rp0YMNh9 +++ cat /tmp/tmp.UgSkutGZgp +++ rm /tmp/tmp.F4Rp0YMNh9 /tmp/tmp.UgSkutGZgp +++ return 0 ++ local client_container=psmdb-client-696897d69b-5k2gv ++ kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.96ozwbkAHG +++ mktemp ++ local LAST_ERR=/tmp/tmp.vLPBwT5cCP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.96ozwbkAHG ++ cat /tmp/tmp.vLPBwT5cCP ++ rm /tmp/tmp.96ozwbkAHG /tmp/tmp.vLPBwT5cCP ++ return 0 grep: write error: Broken pipe + [[ '' == \t\r\u\e ]] + kubectl_bin delete psmdb-restore restore-backup-minio-selective ++ mktemp + local LAST_OUT=/tmp/tmp.BnO47WTeV9 ++ mktemp + local LAST_ERR=/tmp/tmp.dnfOxwtApM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb-restore restore-backup-minio-selective + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.BnO47WTeV9 perconaservermongodbrestore.psmdb.percona.com "restore-backup-minio-selective" deleted from demand-backup-19332 namespace + cat /tmp/tmp.dnfOxwtApM + rm /tmp/tmp.BnO47WTeV9 /tmp/tmp.dnfOxwtApM + return 0 + run_mongo 'use myApp\n db.test2.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.test2.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local
suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.12YEy4ZVpR +++ mktemp ++ local LAST_ERR=/tmp/tmp.fajFDCJtsQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.12YEy4ZVpR ++ cat /tmp/tmp.fajFDCJtsQ ++ rm /tmp/tmp.12YEy4ZVpR /tmp/tmp.fajFDCJtsQ ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.OHhN0sqbX2 ++ mktemp + local LAST_ERR=/tmp/tmp.FDu6fs9waj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.OHhN0sqbX2 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("66dbb9a5-fafc-4d9a-8797-19767ed48b35") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.FDu6fs9waj + rm /tmp/tmp.OHhN0sqbX2 /tmp/tmp.FDu6fs9waj + return 0 + desc 'selective restore with users and roles -- minio' + set +o xtrace ----------------------------------------------------------------------------------- selective restore with users and roles -- minio ----------------------------------------------------------------------------------- + run_recovery_check_selective backup-minio some-name-rs0 myApp test2 true test1user test1role + local backup=backup-minio + local cluster=some-name-rs0 + local database=myApp + local collection=test2 + local with_users_and_roles=true + local username=test1user + local role=test1role + restore_name=restore-backup-minio-selective + run_mongo 'use myApp\n db.test2.drop()' myApp:myPass@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.test2.drop()' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8SEjOhyudy +++ mktemp ++ local LAST_ERR=/tmp/tmp.HcZ6HYe8st ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 
0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8SEjOhyudy ++ cat /tmp/tmp.HcZ6HYe8st ++ rm /tmp/tmp.8SEjOhyudy /tmp/tmp.HcZ6HYe8st ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.SESfBt6IdT ++ mktemp + local LAST_ERR=/tmp/tmp.5xliuHgNpU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SESfBt6IdT Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("495f71f6-2efd-408f-b18a-7d3f791ae098") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.5xliuHgNpU + rm /tmp/tmp.SESfBt6IdT /tmp/tmp.5xliuHgNpU + return 0 ++ collection_exists test2 ++ local collection=test2 ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-19332 '' '' --quiet ++ grep -v 'switched to' ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-19332 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local replica_set=rs0 ++ [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ jq 'index("test2") != null' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.TUTq2S0deQ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.enfr79FEh7 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.TUTq2S0deQ +++ cat /tmp/tmp.enfr79FEh7 +++ rm /tmp/tmp.TUTq2S0deQ /tmp/tmp.enfr79FEh7 +++ return 0 ++ local client_container=psmdb-client-696897d69b-5k2gv ++ kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JUjysWwyOx +++ mktemp ++ local LAST_ERR=/tmp/tmp.VTtDgbxK3A ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JUjysWwyOx ++ cat 
/tmp/tmp.VTtDgbxK3A ++ rm /tmp/tmp.JUjysWwyOx /tmp/tmp.VTtDgbxK3A ++ return 0 + [[ false == \t\r\u\e ]] + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/restore.yml + yq '.metadata.name="restore-backup-minio-selective"' + yq '.spec.backupName="backup-minio"' + yq '.spec.selective.namespaces[0]="myApp.test"' + yq .spec.selective.withUsersAndRoles=true + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.YqpHVchAeI ++ mktemp + local LAST_ERR=/tmp/tmp.FOXgLdfj8C + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YqpHVchAeI perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-selective created + cat /tmp/tmp.FOXgLdfj8C + rm /tmp/tmp.YqpHVchAeI /tmp/tmp.FOXgLdfj8C + return 0 + wait_restore backup-minio-selective some-name + local backup_name=backup-minio-selective + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-selective object to be created.OK Waiting psmdb-restore/restore-backup-minio-selective to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mutF11Juzq +++ mktemp ++ local LAST_ERR=/tmp/tmp.rnRazrBbXU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mutF11Juzq ++ cat /tmp/tmp.rnRazrBbXU ++ rm /tmp/tmp.mutF11Juzq /tmp/tmp.rnRazrBbXU ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK ++ collection_exists test2 ++ local collection=test2 ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-19332 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-19332 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local replica_set=rs0 ++ [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ grep -v 'switched to' ++ jq 'index("test2") != null' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.WErT7jbwPH ++++ mktemp +++ local LAST_ERR=/tmp/tmp.GhICie8tlm +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.WErT7jbwPH +++ cat /tmp/tmp.GhICie8tlm +++ rm /tmp/tmp.WErT7jbwPH /tmp/tmp.GhICie8tlm +++ return 0 ++ local client_container=psmdb-client-696897d69b-5k2gv ++ kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jAP2a2eZUu +++ mktemp ++ local LAST_ERR=/tmp/tmp.y9JugO38rY ++ local 
exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.jAP2a2eZUu ++ cat /tmp/tmp.y9JugO38rY ++ rm /tmp/tmp.jAP2a2eZUu /tmp/tmp.y9JugO38rY ++ return 0 + [[ false == \t\r\u\e ]] + [[ true == \t\r\u\e ]] ++ user_exists test1user ++ local username=test1user ++ run_mongo 'use myApp\n JSON.stringify(db.getUsers())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getUsers())' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local replica_set=rs0 ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ grep -v 'switched to' ++ jq 'any(.[]; ._id==myApp.test1user)' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.l6YrEpsRkG ++++ mktemp jq: error: myApp/0 is not defined at <top-level>, line 1: any(.[]; ._id==myApp.test1user) jq: 1 compile error +++ local LAST_ERR=/tmp/tmp.yjl3KqluyV +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.l6YrEpsRkG +++ cat /tmp/tmp.yjl3KqluyV +++ rm /tmp/tmp.l6YrEpsRkG /tmp/tmp.yjl3KqluyV +++ return 0 ++ local client_container=psmdb-client-696897d69b-5k2gv ++ kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oivLGBt55U +++ mktemp ++ local LAST_ERR=/tmp/tmp.ucRknvWAgy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oivLGBt55U ++ cat /tmp/tmp.ucRknvWAgy ++ rm /tmp/tmp.oivLGBt55U /tmp/tmp.ucRknvWAgy ++ return 0 grep: write error: Broken pipe + [[ '' == \f\a\l\s\e ]] ++ role_exists test1role ++ local role=test1role ++ run_mongo 'use myApp\n JSON.stringify(db.getRoles())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getRoles())' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 ++ grep -v 'switched to' ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ local mongo_flag=--quiet ++ local replica_set=rs0 ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ jq 'any(.[]; ._id==myApp.test1role)' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp jq: error: myApp/0 is not defined at <top-level>, line 1: any(.[]; ._id==myApp.test1role) jq: 1
compile error +++ local LAST_OUT=/tmp/tmp.wVwinvkWvx ++++ mktemp +++ local LAST_ERR=/tmp/tmp.FLpSop9KtR +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.wVwinvkWvx +++ cat /tmp/tmp.FLpSop9KtR +++ rm /tmp/tmp.wVwinvkWvx /tmp/tmp.FLpSop9KtR +++ return 0 ++ local client_container=psmdb-client-696897d69b-5k2gv ++ kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NlqjM5rpjD +++ mktemp ++ local LAST_ERR=/tmp/tmp.MziqRA3Mye ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NlqjM5rpjD ++ cat /tmp/tmp.MziqRA3Mye ++ rm /tmp/tmp.NlqjM5rpjD /tmp/tmp.MziqRA3Mye ++ return 0 grep: write error: Broken pipe + [[ '' == \f\a\l\s\e ]] + desc 'restore from backup source, with storageName -- minio' + set +o xtrace ----------------------------------------------------------------------------------- restore from backup source, with storageName -- minio ----------------------------------------------------------------------------------- ++ get_backup_dest backup-minio ++ local backup_name=backup-minio ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ sed 's|azure://||' +++ mktemp ++ sed 's|gs://||' ++ local LAST_OUT=/tmp/tmp.HtJJelEEzU +++ mktemp ++ local LAST_ERR=/tmp/tmp.4pKJiZobIF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HtJJelEEzU ++ cat /tmp/tmp.4pKJiZobIF ++ rm /tmp/tmp.HtJJelEEzU /tmp/tmp.4pKJiZobIF ++ return 0 + backup_dest_minio=operator-testing/2026-01-12T21:25:15Z
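Note: get_backup_dest reads .status.destination from the psmdb-backup object and normalizes it by stripping a trailing .json and any s3://, azure://, or gs:// scheme prefix, which is how backup_dest_minio ends up as the bare bucket/path above. The same sed chain, collapsed into a single invocation for illustration (the input URI is hypothetical):

echo 's3://operator-testing/2026-01-12T21:25:15Z' \
    | sed -e 's/.json$//' -e 's|s3://||' -e 's|azure://||' -e 's|gs://||'
# prints: operator-testing/2026-01-12T21:25:15Z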
+ run_recovery_check_bkp_source backup-minio operator-testing/2026-01-12T21:25:15Z some-name-rs0 backup-minio-source-0 + local backup=backup-minio + local backup_dest=operator-testing/2026-01-12T21:25:15Z + local cluster=some-name-rs0 + local source=backup-minio-source-0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2oFiSmOfdw +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ib5HhMR7F0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2oFiSmOfdw ++ cat /tmp/tmp.Ib5HhMR7F0 ++ rm /tmp/tmp.2oFiSmOfdw /tmp/tmp.Ib5HhMR7F0 ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.9PQ2OXxZB1 ++ mktemp + local LAST_ERR=/tmp/tmp.8iAi7O4rjI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9PQ2OXxZB1 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("45058e99-cce6-457f-a310-9a2b970e1829") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.8iAi7O4rjI + rm /tmp/tmp.9PQ2OXxZB1 /tmp/tmp.8iAi7O4rjI + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-19332 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:30:33+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-19332 mongodb .svc.cluster.local '' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P2yUiIT7Zu +++ mktemp ++ local LAST_ERR=/tmp/tmp.AK7bFowH1R ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.P2yUiIT7Zu ++ cat /tmp/tmp.AK7bFowH1R ++ rm /tmp/tmp.P2yUiIT7Zu /tmp/tmp.AK7bFowH1R ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv
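Note: compare_mongo_cmd normalizes the shell output before diffing it against the golden file: the grep -E -v a few commands up drops the connection banner and version-skew noise, and the sed expression blanks ObjectId("...") values and replaces the random per-run namespace suffix in hostnames with -xxx. A small illustration of that sed rewrite (the input line is hypothetical):

echo '{ "_id" : ObjectId("6965a1b2c3d4e5f601234567"), "x" : 100500 } rs0.demand-backup-19332.svc' \
    | sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
# prints: { "_id" : , "x" : 100500 } rs0.demand-backup-xxx.svc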
+ kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.UyB60WEPOQ ++ mktemp + local LAST_ERR=/tmp/tmp.sBK5ow4kJk + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UyB60WEPOQ + cat /tmp/tmp.sBK5ow4kJk + rm /tmp/tmp.UyB60WEPOQ /tmp/tmp.sBK5ow4kJk + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.lfGJl3onxJ/find-2nd + run_restore_backupsource backup-minio-source-0 operator-testing/2026-01-12T21:25:15Z + local backupName=backup-minio-source-0 + local backupDest=operator-testing/2026-01-12T21:25:15Z + local storageName= + desc 'run restore restore-backup-minio-source-0 from backup backup-minio-source-0 destination is operator-testing/2026-01-12T21:25:15Z' + set +o xtrace ----------------------------------------------------------------------------------- run restore restore-backup-minio-source-0 from backup backup-minio-source-0 destination is operator-testing/2026-01-12T21:25:15Z ----------------------------------------------------------------------------------- + '[' -z '' ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/restore-backupsource.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-source-0/' + /usr/sbin/sed -e 's|BACKUP-NAME|operator-testing/2026-01-12T21:25:15Z|' + /usr/sbin/sed -e /storageName/d + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.VA7alMXutR ++ mktemp + local LAST_ERR=/tmp/tmp.ClbIGLoegk + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VA7alMXutR perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-source-0 created + cat /tmp/tmp.ClbIGLoegk + rm /tmp/tmp.VA7alMXutR /tmp/tmp.ClbIGLoegk + return 0 + return
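Note: run_restore_backupsource builds this restore from conf/restore-backupsource.yml instead of referencing a psmdb-backup object by name: sed fills in the restore name, substitutes the backup destination for the BACKUP-NAME placeholder, and, since storageName is empty on this path, deletes the storageName line entirely. A sketch of the applied object follows; the apiVersion/kind, the clusterName value, and the exact shape of the destination line are assumptions about the template, which the log does not show:

kubectl apply -f - <<EOF
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-minio-source-0
spec:
  clusterName: some-name    # assumed: defined in the base template
  backupSource:
    destination: s3://operator-testing/2026-01-12T21:25:15Z    # assumed: the template supplies the s3:// prefix around BACKUP-NAME
EOF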
+ wait_restore backup-minio-source-0 some-name + local backup_name=backup-minio-source-0 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-source-0 object to be created.OK Waiting psmdb-restore/restore-backup-minio-source-0 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CNdbAhqZOb +++ mktemp ++ local LAST_ERR=/tmp/tmp.HLu6C0VIC8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CNdbAhqZOb ++ cat /tmp/tmp.HLu6C0VIC8 ++ rm /tmp/tmp.CNdbAhqZOb /tmp/tmp.HLu6C0VIC8 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:30:56+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 mongodb '' '' + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 == *cfg* ]] + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.F3lC3ZDbKk +++ mktemp ++ local LAST_ERR=/tmp/tmp.sgq200S58v ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.F3lC3ZDbKk ++ cat /tmp/tmp.sgq200S58v ++ rm /tmp/tmp.F3lC3ZDbKk /tmp/tmp.sgq200S58v ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.RSbl6E4QDP ++ mktemp + local LAST_ERR=/tmp/tmp.cl61eTMerE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RSbl6E4QDP + cat /tmp/tmp.cl61eTMerE + rm /tmp/tmp.RSbl6E4QDP /tmp/tmp.cl61eTMerE + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] +
run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LZjxs8WxRp +++ mktemp ++ local LAST_ERR=/tmp/tmp.0FIS0rLYy3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LZjxs8WxRp ++ cat /tmp/tmp.0FIS0rLYy3 ++ rm /tmp/tmp.LZjxs8WxRp /tmp/tmp.0FIS0rLYy3 ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.KrXOaBOZv5 ++ mktemp + local LAST_ERR=/tmp/tmp.3ienTqO64l + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KrXOaBOZv5 + cat /tmp/tmp.3ienTqO64l + rm /tmp/tmp.KrXOaBOZv5 /tmp/tmp.3ienTqO64l + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:31:01+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + 
/usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LqUJqwgSRw +++ mktemp ++ local LAST_ERR=/tmp/tmp.gn3sxLk9ih ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LqUJqwgSRw ++ cat /tmp/tmp.gn3sxLk9ih ++ rm /tmp/tmp.LqUJqwgSRw /tmp/tmp.gn3sxLk9ih ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.yzeWtF2Z0f ++ mktemp + local LAST_ERR=/tmp/tmp.WlBLkKga2s + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yzeWtF2Z0f + cat /tmp/tmp.WlBLkKga2s + rm /tmp/tmp.yzeWtF2Z0f /tmp/tmp.WlBLkKga2s + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + desc 'restore from backup source, no storageName -- minio' + set +o xtrace ----------------------------------------------------------------------------------- restore from backup source, no storageName -- minio ----------------------------------------------------------------------------------- ++ get_backup_dest backup-minio ++ local backup_name=backup-minio ++ sed -e 's/.json$//' ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ sed 's|s3://||' ++ sed 's|azure://||' ++ sed 's|gs://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6GATrgJn2M +++ mktemp ++ local LAST_ERR=/tmp/tmp.SKTXM9Wksu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6GATrgJn2M ++ cat /tmp/tmp.SKTXM9Wksu ++ rm /tmp/tmp.6GATrgJn2M /tmp/tmp.SKTXM9Wksu ++ return 0 + backup_dest_minio=operator-testing/2026-01-12T21:25:15Z + run_recovery_check_bkp_source backup-minio operator-testing/2026-01-12T21:25:15Z some-name-rs0 backup-minio-source-1 + local backup=backup-minio + local backup_dest=operator-testing/2026-01-12T21:25:15Z + local cluster=some-name-rs0 + local source=backup-minio-source-1 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-19332 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb+srv + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UEHk4ggGWO +++ mktemp ++ local LAST_ERR=/tmp/tmp.Tsb82MU8mV ++ local exit_status=0 ++ local timeout=4 +++ 
seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UEHk4ggGWO ++ cat /tmp/tmp.Tsb82MU8mV ++ rm /tmp/tmp.UEHk4ggGWO /tmp/tmp.Tsb82MU8mV ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.MYiDEl1fxn ++ mktemp + local LAST_ERR=/tmp/tmp.uTbQsxQFC8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MYiDEl1fxn Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("79147625-39db-426c-b88d-a16dcdfe168e") } Percona Server for MongoDB server version: v8.0.17-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.uTbQsxQFC8 + rm /tmp/tmp.MYiDEl1fxn /tmp/tmp.uTbQsxQFC8 + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-19332 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:31:06+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-19332 mongodb .svc.cluster.local '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0.demand-backup-19332 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tUyXG5dTjt +++ mktemp ++ local LAST_ERR=/tmp/tmp.aqFGNZGIRD ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.tUyXG5dTjt ++ cat 
/tmp/tmp.aqFGNZGIRD ++ rm /tmp/tmp.tUyXG5dTjt /tmp/tmp.aqFGNZGIRD ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.3q7qfNR3Lp ++ mktemp + local LAST_ERR=/tmp/tmp.f0cqAzopKH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3q7qfNR3Lp + cat /tmp/tmp.f0cqAzopKH + rm /tmp/tmp.3q7qfNR3Lp /tmp/tmp.f0cqAzopKH + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.lfGJl3onxJ/find-2nd + run_restore_backupsource backup-minio-source-1 operator-testing/2026-01-12T21:25:15Z + local backupName=backup-minio-source-1 + local backupDest=operator-testing/2026-01-12T21:25:15Z + local storageName= + desc 'run restore restore-backup-minio-source-1 from backup backup-minio-source-1 destination is operator-testing/2026-01-12T21:25:15Z' + set +o xtrace ----------------------------------------------------------------------------------- run restore restore-backup-minio-source-1 from backup backup-minio-source-1 destination is operator-testing/2026-01-12T21:25:15Z ----------------------------------------------------------------------------------- + '[' -z '' ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/restore-backupsource.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-source-1/' + /usr/sbin/sed -e 's|BACKUP-NAME|operator-testing/2026-01-12T21:25:15Z|' + /usr/sbin/sed -e /storageName/d + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.c2BzzF3v3T ++ mktemp + local LAST_ERR=/tmp/tmp.xaEshp0SHO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.c2BzzF3v3T perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-source-1 created + cat /tmp/tmp.xaEshp0SHO + rm /tmp/tmp.c2BzzF3v3T /tmp/tmp.xaEshp0SHO + return 0 + return + wait_restore backup-minio-source-1 some-name + local backup_name=backup-minio-source-1 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-source-1 object to be created.OK Waiting psmdb-restore/restore-backup-minio-source-1 to reach state "ready" .OK after 0 minutes + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bOC3Hu8K36 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6Td5tqscsA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bOC3Hu8K36 ++ cat 
/tmp/tmp.6Td5tqscsA ++ rm /tmp/tmp.bOC3Hu8K36 /tmp/tmp.6Td5tqscsA ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:31:30+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cDnklp8xaK +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vdo2E4TwXL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cDnklp8xaK ++ cat /tmp/tmp.Vdo2E4TwXL ++ rm /tmp/tmp.cDnklp8xaK /tmp/tmp.Vdo2E4TwXL ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.336bQWuNjI ++ mktemp + local LAST_ERR=/tmp/tmp.UXUC60ARdT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.336bQWuNjI + cat /tmp/tmp.UXUC60ARdT + rm /tmp/tmp.336bQWuNjI /tmp/tmp.UXUC60ARdT + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + 
log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:31:32+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WadVZKu5Ei +++ mktemp ++ local LAST_ERR=/tmp/tmp.S9gtkJP8Lo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WadVZKu5Ei ++ cat /tmp/tmp.S9gtkJP8Lo ++ rm /tmp/tmp.WadVZKu5Ei /tmp/tmp.S9gtkJP8Lo ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.XEvijHyZo0 ++ mktemp + local LAST_ERR=/tmp/tmp.tWMpeipCcJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XEvijHyZo0 + cat /tmp/tmp.tWMpeipCcJ + rm /tmp/tmp.XEvijHyZo0 /tmp/tmp.tWMpeipCcJ + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + for i in $(seq 0 $last_pod) + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local replicaset= + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2026-01-12T21:31:33+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 mongodb '' '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 + local driver=mongodb + local suffix=.svc.cluster.local + local mongo_flag= + local replica_set=rs0 + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332 == *cfg* ]] + grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for 
MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JPEGGXTeIC +++ mktemp ++ local LAST_ERR=/tmp/tmp.mM5MHadxKi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JPEGGXTeIC ++ cat /tmp/tmp.mM5MHadxKi ++ rm /tmp/tmp.JPEGGXTeIC /tmp/tmp.mM5MHadxKi ++ return 0 + local client_container=psmdb-client-696897d69b-5k2gv + kubectl_bin exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.P6K4saacuZ ++ mktemp + local LAST_ERR=/tmp/tmp.BmzPVkEAs5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-5k2gv -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-19332.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.P6K4saacuZ + cat /tmp/tmp.BmzPVkEAs5 + rm /tmp/tmp.P6K4saacuZ /tmp/tmp.BmzPVkEAs5 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/compare/find.json /tmp/tmp.lfGJl3onxJ/find + desc 'delete backup and check if it is removed from bucket -- minio' + set +o xtrace ----------------------------------------------------------------------------------- delete backup and check if it is removed from bucket -- minio ----------------------------------------------------------------------------------- + kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.r8flbwjZnF ++ mktemp + local LAST_ERR=/tmp/tmp.n7Phrh4BaF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.r8flbwjZnF perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted from demand-backup-19332 namespace perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted from demand-backup-19332 namespace perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs-s3" deleted from demand-backup-19332 namespace perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs-sa" deleted from demand-backup-19332 namespace perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted from demand-backup-19332 namespace + cat /tmp/tmp.n7Phrh4BaF + rm /tmp/tmp.r8flbwjZnF /tmp/tmp.n7Phrh4BaF + return 0 ++ kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ grep -c operator-testing/2026-01-12T21:25:15Z ++ cat +++ mktemp ++ local LAST_OUT=/tmp/tmp.pkslC5gc8S +++ mktemp ++ local LAST_ERR=/tmp/tmp.z639uXhBPC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ 
for i in $(seq 0 2) ++ set +e ++ kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pkslC5gc8S ++ cat /tmp/tmp.z639uXhBPC All commands and output from this session will be recorded in container logs, including credentials and sensitive information passed through the command prompt. If you don't see a command prompt, try pressing enter. warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: Internal error occurred: error attaching to container: container is in CONTAINER_EXITED state ++ rm /tmp/tmp.pkslC5gc8S /tmp/tmp.z639uXhBPC ++ return 0 + backup_exists=0 + [[ 0 -eq 1 ]] + '[' -z '' ']' + check_backup_deletion_aws operator-testing/psmdb-demand-backup/2026-01-12T21:24:40Z /rs0/myApp.test.gz ++ echo operator-testing/psmdb-demand-backup/2026-01-12T21:24:40Z ++ cut -d/ -f1 + bucket=operator-testing ++ echo operator-testing/psmdb-demand-backup/2026-01-12T21:24:40Z ++ cut -d/ -f2- + key_prefix=psmdb-demand-backup/2026-01-12T21:24:40Z + key=/rs0/myApp.test.gz + storage_name=aws-s3 + retry=0 + aws s3api head-object --bucket operator-testing --key psmdb-demand-backup/2026-01-12T21:24:40Z/rs0/myApp.test.gz + echo 'Backup psmdb-demand-backup/2026-01-12T21:24:40Z/rs0/myApp.test.gz in bucket operator-testing not found in aws-s3' Backup psmdb-demand-backup/2026-01-12T21:24:40Z/rs0/myApp.test.gz in bucket operator-testing not found in aws-s3 + check_backup_deletion_gcs operator-testing/psmdb-demand-backup/2026-01-12T21:25:03Z /rs0/myApp.test.gz + backup_dest_gcp=operator-testing/psmdb-demand-backup/2026-01-12T21:25:03Z + obj=/rs0/myApp.test.gz + storage_name=gcp-cs + retry=0 + gcs_path=gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:03Z/rs0/myApp.test.gz + gsutil ls gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:03Z/rs0/myApp.test.gz + echo 'Backup gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:03Z/rs0/myApp.test.gz not found in gcp-cs' Backup gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:03Z/rs0/myApp.test.gz not found in gcp-cs + check_backup_deletion_gcs operator-testing/psmdb-demand-backup/2026-01-12T21:25:25Z /rs0/myApp.test.gz + backup_dest_gcp=operator-testing/psmdb-demand-backup/2026-01-12T21:25:25Z + obj=/rs0/myApp.test.gz + storage_name=gcp-cs + retry=0 + gcs_path=gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:25Z/rs0/myApp.test.gz + gsutil ls gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:25Z/rs0/myApp.test.gz + echo 'Backup gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:25Z/rs0/myApp.test.gz not found in gcp-cs' Backup gs://operator-testing/psmdb-demand-backup/2026-01-12T21:25:25Z/rs0/myApp.test.gz not found in gcp-cs + check_backup_deletion_azure https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2026-01-12T21:24:52Z /rs0/myApp.test.gz ++ echo https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2026-01-12T21:24:52Z ++ sed 's|https://[^/]*\.blob\.core\.windows\.net/||' + url_path=operator-testing/psmdb-demand-backup/2026-01-12T21:24:52Z ++ echo operator-testing/psmdb-demand-backup/2026-01-12T21:24:52Z ++ cut -d/ -f1 + container=operator-testing ++ echo operator-testing/psmdb-demand-backup/2026-01-12T21:24:52Z ++ 
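-----------------------------------------------------------------------------------
annotation: verifying that deleted backups really left the buckets
-----------------------------------------------------------------------------------
Around here the test probes each storage backend for the artifact a deleted
backup would have left behind: aws s3api head-object for S3, gsutil ls for GCS,
and az storage blob show for Azure. Each check passes when the probe fails. The
retry=0 counters in the trace suggest the probes sit inside a retry loop; a
sketch of that idea, with the helper name, attempt cap, and sleep interval all
assumed here rather than read from the log:

wait_object_gone() {
  local probe=$1   # e.g. 'aws s3api head-object --bucket operator-testing --key "$key"'
  local retry=0
  while eval "$probe" >/dev/null 2>&1; do
    retry=$((retry + 1))
    if [ "$retry" -ge 10 ]; then
      echo "object still present after $retry checks" >&2
      return 1
    fi
    sleep 5
  done
}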
cut -d/ -f2- + blob_prefix=psmdb-demand-backup/2026-01-12T21:24:52Z + blob=/rs0/myApp.test.gz + storage_name=azure-blob + retry=0 + blob_path=psmdb-demand-backup/2026-01-12T21:24:52Z/rs0/myApp.test.gz + az storage blob show --container-name operator-testing --name psmdb-demand-backup/2026-01-12T21:24:52Z/rs0/myApp.test.gz + echo 'Backup psmdb-demand-backup/2026-01-12T21:24:52Z/rs0/myApp.test.gz in container operator-testing not found in azure-blob' Backup psmdb-demand-backup/2026-01-12T21:24:52Z/rs0/myApp.test.gz in container operator-testing not found in azure-blob + desc 'checking backup deletion without cr' + set +o xtrace ----------------------------------------------------------------------------------- checking backup deletion without cr ----------------------------------------------------------------------------------- + run_backup minio + local storage=minio + local backup_name=backup-minio + local type=logical + log 'running backup backup-minio' + set +o xtrace [2026-01-12T21:31:51+0000] running backup backup-minio + yq eval '.metadata.name = "backup-minio" | .spec.storageName = "minio" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/backup-minio.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.wYPRIBnnkU ++ mktemp + local LAST_ERR=/tmp/tmp.jRp6pzILVL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wYPRIBnnkU perconaservermongodbbackup.psmdb.percona.com/backup-minio created + cat /tmp/tmp.jRp6pzILVL + rm /tmp/tmp.wYPRIBnnkU /tmp/tmp.jRp6pzILVL + return 0 + '[' -z '' ']' + run_backup aws-s3 + local storage=aws-s3 + local backup_name=backup-aws-s3 + local type=logical + log 'running backup backup-aws-s3' + set +o xtrace [2026-01-12T21:31:53+0000] running backup backup-aws-s3 + yq eval '.metadata.name = "backup-aws-s3" | .spec.storageName = "aws-s3" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/backup-aws-s3.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.eUGcJTeziq ++ mktemp + local LAST_ERR=/tmp/tmp.kxoptM4jeS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eUGcJTeziq perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created + cat /tmp/tmp.kxoptM4jeS + rm /tmp/tmp.eUGcJTeziq /tmp/tmp.kxoptM4jeS + return 0 + run_backup gcp-cs-s3 + local storage=gcp-cs-s3 + local backup_name=backup-gcp-cs-s3 + local type=logical + log 'running backup backup-gcp-cs-s3' + set +o xtrace [2026-01-12T21:31:54+0000] running backup backup-gcp-cs-s3 + yq eval '.metadata.name = "backup-gcp-cs-s3" | .spec.storageName = "gcp-cs-s3" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/backup-gcp-cs-s3.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.JzpotsN9Sz ++ mktemp + local LAST_ERR=/tmp/tmp.EACKDPvtXQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JzpotsN9Sz perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs-s3 created + cat /tmp/tmp.EACKDPvtXQ + rm /tmp/tmp.JzpotsN9Sz /tmp/tmp.EACKDPvtXQ + return 0 + run_backup gcp-cs-sa + local storage=gcp-cs-sa + local backup_name=backup-gcp-cs-sa + 
local type=logical + log 'running backup backup-gcp-cs-sa' + set +o xtrace [2026-01-12T21:31:57+0000] running backup backup-gcp-cs-sa + yq eval '.metadata.name = "backup-gcp-cs-sa" | .spec.storageName = "gcp-cs-sa" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/backup-gcp-cs-sa.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.ia9GzrizL9 ++ mktemp + local LAST_ERR=/tmp/tmp.SU1T5z02Mj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ia9GzrizL9 perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs-sa created + cat /tmp/tmp.SU1T5z02Mj + rm /tmp/tmp.ia9GzrizL9 /tmp/tmp.SU1T5z02Mj + return 0 + run_backup azure-blob + local storage=azure-blob + local backup_name=backup-azure-blob + local type=logical + log 'running backup backup-azure-blob' + set +o xtrace [2026-01-12T21:31:59+0000] running backup backup-azure-blob + yq eval '.metadata.name = "backup-azure-blob" | .spec.storageName = "azure-blob" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/e2e-tests/demand-backup/conf/backup-azure-blob.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.U5q4y6QyUY ++ mktemp + local LAST_ERR=/tmp/tmp.GV5pR7xtfs + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.U5q4y6QyUY perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created + cat /tmp/tmp.GV5pR7xtfs + rm /tmp/tmp.U5q4y6QyUY /tmp/tmp.GV5pR7xtfs + return 0 + wait_backup backup-minio + local backup_name=backup-minio + local target_state=ready + set +o xtrace waiting for backup-minio to reach ready state...OK + '[' -z '' ']' + wait_backup backup-aws-s3 + local backup_name=backup-aws-s3 + local target_state=ready + set +o xtrace waiting for backup-aws-s3 to reach ready state............OK + wait_backup backup-gcp-cs-s3 + local backup_name=backup-gcp-cs-s3 + local target_state=ready + set +o xtrace waiting for backup-gcp-cs-s3 to reach ready state............OK + wait_backup backup-gcp-cs-sa + local backup_name=backup-gcp-cs-sa + local target_state=ready + set +o xtrace waiting for backup-gcp-cs-sa to reach ready state..OK + wait_backup backup-azure-blob + local backup_name=backup-azure-blob + local target_state=ready + set +o xtrace waiting for backup-azure-blob to reach ready state..OK ++ get_backup_dest backup-aws-s3 ++ local backup_name=backup-aws-s3 ++ kubectl_bin get psmdb-backup backup-aws-s3 -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ sed 's|azure://||' +++ mktemp ++ sed 's|gs://||' ++ local LAST_OUT=/tmp/tmp.IKd8ZRD9gY +++ mktemp ++ local LAST_ERR=/tmp/tmp.Lgkz05HfDF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-aws-s3 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IKd8ZRD9gY ++ cat /tmp/tmp.Lgkz05HfDF ++ rm /tmp/tmp.IKd8ZRD9gY /tmp/tmp.Lgkz05HfDF ++ return 0 + backup_dest_aws=operator-testing/psmdb-demand-backup/2026-01-12T21:32:15Z ++ get_backup_dest backup-gcp-cs-s3 ++ local backup_name=backup-gcp-cs-s3 ++ kubectl_bin get psmdb-backup backup-gcp-cs-s3 -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' +++ mktemp ++ sed 's|azure://||' ++ sed 's|gs://||' ++ 
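-----------------------------------------------------------------------------------
annotation: how the on-demand backups above are created
-----------------------------------------------------------------------------------
run_backup is visible in full in the trace: it rewrites a per-storage template
with yq (name, storageName, type) and pipes the result to kubectl apply.
wait_backup runs with xtrace disabled, so only its dots are shown; it presumably
polls the backup object's status until it reports ready. A sketch under those
assumptions (the .status.state jsonpath is an assumption, not read from this log):

run_backup() {
  local storage=$1
  local backup_name="backup-${storage}"
  yq eval ".metadata.name = \"${backup_name}\" | .spec.storageName = \"${storage}\" | .spec.type = \"logical\"" \
    "e2e-tests/demand-backup/conf/backup-${storage}.yml" | kubectl apply -f -
}

# wait_backup, reconstructed:
# until [ "$(kubectl get psmdb-backup "$backup_name" -o jsonpath='{.status.state}')" = "ready" ]; do sleep 1; done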
local LAST_OUT=/tmp/tmp.dJYTK0DCjI +++ mktemp ++ local LAST_ERR=/tmp/tmp.zUlVpEPRHB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-gcp-cs-s3 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dJYTK0DCjI ++ cat /tmp/tmp.zUlVpEPRHB ++ rm /tmp/tmp.dJYTK0DCjI /tmp/tmp.zUlVpEPRHB ++ return 0 + backup_dest_gcp_s3=operator-testing/psmdb-demand-backup/2026-01-12T21:32:38Z ++ get_backup_dest backup-gcp-cs-sa ++ local backup_name=backup-gcp-cs-sa ++ kubectl_bin get psmdb-backup backup-gcp-cs-sa -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ sed 's|azure://||' ++ sed 's|gs://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AZRMUJhPYZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.ejSFB3xfMT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-gcp-cs-sa -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AZRMUJhPYZ ++ cat /tmp/tmp.ejSFB3xfMT ++ rm /tmp/tmp.AZRMUJhPYZ /tmp/tmp.ejSFB3xfMT ++ return 0 + backup_dest_gcp_sa=operator-testing/psmdb-demand-backup/2026-01-12T21:32:04Z ++ get_backup_dest backup-azure-blob ++ local backup_name=backup-azure-blob ++ kubectl_bin get psmdb-backup backup-azure-blob -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' +++ mktemp ++ sed 's|azure://||' ++ sed 's|gs://||' ++ local LAST_OUT=/tmp/tmp.Xv0pmfATDY +++ mktemp ++ local LAST_ERR=/tmp/tmp.WWzy5QZgMz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-azure-blob -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Xv0pmfATDY ++ cat /tmp/tmp.WWzy5QZgMz ++ rm /tmp/tmp.Xv0pmfATDY /tmp/tmp.WWzy5QZgMz ++ return 0 + backup_dest_azure=https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2026-01-12T21:32:26Z + kubectl_bin delete psmdb --all ++ mktemp + local LAST_OUT=/tmp/tmp.p7MXcf0nRh ++ mktemp + local LAST_ERR=/tmp/tmp.S4kMlFsyCD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.p7MXcf0nRh perconaservermongodb.psmdb.percona.com "some-name" deleted from demand-backup-19332 namespace + cat /tmp/tmp.S4kMlFsyCD + rm /tmp/tmp.p7MXcf0nRh /tmp/tmp.S4kMlFsyCD + return 0 + sleep 60 + desc 'delete backup and check if it is removed from bucket -- minio' + set +o xtrace ----------------------------------------------------------------------------------- delete backup and check if it is removed from bucket -- minio ----------------------------------------------------------------------------------- + kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.E0JzxXUkKL ++ mktemp + local LAST_ERR=/tmp/tmp.L4ZMVVODHk + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.E0JzxXUkKL perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted from demand-backup-19332 namespace perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted from demand-backup-19332 namespace perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs-s3" deleted from 
demand-backup-19332 namespace perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs-sa" deleted from demand-backup-19332 namespace perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted from demand-backup-19332 namespace + cat /tmp/tmp.L4ZMVVODHk + rm /tmp/tmp.E0JzxXUkKL /tmp/tmp.L4ZMVVODHk + return 0 ++ kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ grep -c operator-testing/2026-01-12T21:25:15Z ++ cat +++ mktemp ++ local LAST_OUT=/tmp/tmp.xKvnzVhaVH +++ mktemp ++ local LAST_ERR=/tmp/tmp.ezfUvJdh08 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xKvnzVhaVH ++ cat /tmp/tmp.ezfUvJdh08 ++ rm /tmp/tmp.xKvnzVhaVH /tmp/tmp.ezfUvJdh08 ++ return 0 + backup_exists=0 + [[ 0 -eq 1 ]] + '[' -z '' ']' + check_backup_deletion_aws operator-testing/psmdb-demand-backup/2026-01-12T21:32:15Z /rs0/myApp.test.gz ++ echo operator-testing/psmdb-demand-backup/2026-01-12T21:32:15Z ++ cut -d/ -f1 + bucket=operator-testing ++ echo operator-testing/psmdb-demand-backup/2026-01-12T21:32:15Z ++ cut -d/ -f2- + key_prefix=psmdb-demand-backup/2026-01-12T21:32:15Z + key=/rs0/myApp.test.gz + storage_name=aws-s3 + retry=0 + aws s3api head-object --bucket operator-testing --key psmdb-demand-backup/2026-01-12T21:32:15Z/rs0/myApp.test.gz + echo 'Backup psmdb-demand-backup/2026-01-12T21:32:15Z/rs0/myApp.test.gz in bucket operator-testing not found in aws-s3' Backup psmdb-demand-backup/2026-01-12T21:32:15Z/rs0/myApp.test.gz in bucket operator-testing not found in aws-s3 + check_backup_deletion_gcs operator-testing/psmdb-demand-backup/2026-01-12T21:32:38Z /rs0/myApp.test.gz + backup_dest_gcp=operator-testing/psmdb-demand-backup/2026-01-12T21:32:38Z + obj=/rs0/myApp.test.gz + storage_name=gcp-cs + retry=0 + gcs_path=gs://operator-testing/psmdb-demand-backup/2026-01-12T21:32:38Z/rs0/myApp.test.gz + gsutil ls gs://operator-testing/psmdb-demand-backup/2026-01-12T21:32:38Z/rs0/myApp.test.gz + echo 'Backup gs://operator-testing/psmdb-demand-backup/2026-01-12T21:32:38Z/rs0/myApp.test.gz not found in gcp-cs' Backup gs://operator-testing/psmdb-demand-backup/2026-01-12T21:32:38Z/rs0/myApp.test.gz not found in gcp-cs + check_backup_deletion_gcs operator-testing/psmdb-demand-backup/2026-01-12T21:32:04Z /rs0/myApp.test.gz + backup_dest_gcp=operator-testing/psmdb-demand-backup/2026-01-12T21:32:04Z + obj=/rs0/myApp.test.gz + storage_name=gcp-cs + retry=0 + gcs_path=gs://operator-testing/psmdb-demand-backup/2026-01-12T21:32:04Z/rs0/myApp.test.gz + gsutil ls gs://operator-testing/psmdb-demand-backup/2026-01-12T21:32:04Z/rs0/myApp.test.gz + echo 'Backup gs://operator-testing/psmdb-demand-backup/2026-01-12T21:32:04Z/rs0/myApp.test.gz not found in gcp-cs' Backup gs://operator-testing/psmdb-demand-backup/2026-01-12T21:32:04Z/rs0/myApp.test.gz not found in gcp-cs + check_backup_deletion_azure https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2026-01-12T21:32:26Z /rs0/myApp.test.gz ++ echo 
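-----------------------------------------------------------------------------------
annotation: resolving where each backup landed
-----------------------------------------------------------------------------------
get_backup_dest (traced above) reads .status.destination from the backup object
and strips the scheme prefix, leaving bucket/prefix for the cloud probes; the
Azure destination keeps its https:// host and is split off with a dedicated
sed 's|https://[^/]*\.blob\.core\.windows\.net/||'. A condensed sketch of the
same chain:

get_backup_dest() {
  kubectl get psmdb-backup "$1" -o 'jsonpath={.status.destination}' |
    sed -e 's/.json$//' -e 's|s3://||' -e 's|gs://||' -e 's|azure://||'
}

dest=$(get_backup_dest backup-aws-s3)        # operator-testing/psmdb-demand-backup/<ts>
bucket=$(echo "$dest" | cut -d/ -f1)         # operator-testing
key_prefix=$(echo "$dest" | cut -d/ -f2-)    # psmdb-demand-backup/<ts>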
https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2026-01-12T21:32:26Z ++ sed 's|https://[^/]*\.blob\.core\.windows\.net/||' + url_path=operator-testing/psmdb-demand-backup/2026-01-12T21:32:26Z ++ echo operator-testing/psmdb-demand-backup/2026-01-12T21:32:26Z ++ cut -d/ -f1 + container=operator-testing ++ echo operator-testing/psmdb-demand-backup/2026-01-12T21:32:26Z ++ cut -d/ -f2- + blob_prefix=psmdb-demand-backup/2026-01-12T21:32:26Z + blob=/rs0/myApp.test.gz + storage_name=azure-blob + retry=0 + blob_path=psmdb-demand-backup/2026-01-12T21:32:26Z/rs0/myApp.test.gz + az storage blob show --container-name operator-testing --name psmdb-demand-backup/2026-01-12T21:32:26Z/rs0/myApp.test.gz + echo 'Backup psmdb-demand-backup/2026-01-12T21:32:26Z/rs0/myApp.test.gz in container operator-testing not found in azure-blob' Backup psmdb-demand-backup/2026-01-12T21:32:26Z/rs0/myApp.test.gz in container operator-testing not found in azure-blob + destroy demand-backup-19332 + local namespace=demand-backup-19332 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_backups + desc 'Delete psmdb-backup' + set +o xtrace ----------------------------------------------------------------------------------- Delete psmdb-backup ----------------------------------------------------------------------------------- ++ kubectl_bin get psmdb-backup --no-headers ++ wc -l +++ mktemp ++ local LAST_OUT=/tmp/tmp.uj9nhYEC89 +++ mktemp ++ local LAST_ERR=/tmp/tmp.BZAWpH0fpA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup --no-headers ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.uj9nhYEC89 ++ cat /tmp/tmp.BZAWpH0fpA No resources found in demand-backup-19332 namespace. 
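-----------------------------------------------------------------------------------
annotation: the teardown guard before CRD removal
-----------------------------------------------------------------------------------
destroy counts the psmdb-backup objects left in the namespace (the wc -l
pipeline above, checked by the '[' 0 '!=' 0 ']' just below) before it tears
down the CRDs: dropping the backup CRD while objects still carry finalizers
would leave them undeletable. What the suite does on a nonzero count is not
shown in this trace; a guard in that spirit might read:

backups_left=$(kubectl get psmdb-backup --no-headers 2>/dev/null | wc -l)
if [ "$backups_left" != 0 ]; then
  # assumption: clean up leftovers before dropping the CRD
  kubectl delete psmdb-backup --all
fi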
++ rm /tmp/tmp.uj9nhYEC89 /tmp/tmp.BZAWpH0fpA ++ return 0 + '[' 0 '!=' 0 ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.bMoogLfhOQ ++ mktemp + local LAST_ERR=/tmp/tmp.8pj2JGeK6O + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bMoogLfhOQ customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.8pj2JGeK6O + rm /tmp/tmp.bMoogLfhOQ /tmp/tmp.8pj2JGeK6O + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.WYojHjQEEV ++ mktemp + local LAST_ERR=/tmp/tmp.G0NCB6Lnhx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.WYojHjQEEV + cat /tmp/tmp.G0NCB6Lnhx + rm /tmp/tmp.WYojHjQEEV /tmp/tmp.G0NCB6Lnhx + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.676WVZsqYQ ++ mktemp + local LAST_ERR=/tmp/tmp.MPh2uWNzrg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + 
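-----------------------------------------------------------------------------------
annotation: clearing finalizers so CRD deletion cannot hang
-----------------------------------------------------------------------------------
delete_crd patches every remaining custom resource to an empty finalizers list,
so kubectl wait --for=delete can complete even when the operator that would
normally process those finalizers is already gone. The traced pipeline, reflowed:

kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide |
  grep -v NAMESPACE |
  xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' || :

Two quirks are visible in the trace. The 'grep: warning: stray \ before -'
lines come from grep -v '\-\-\-': backslash-hyphen is an undefined escape in
newer GNU grep, and grep -v -- '---' filters the same YAML document separators
without the warning. And when the resource type is already gone, xargs still
runs the command once on empty input, so $0 falls back to "sh" and produces the
spurious 'kubectl patch ... -n sh' errors seen above; GNU xargs -r
(--no-run-if-empty) would skip that invocation.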
break + cat /tmp/tmp.676WVZsqYQ + cat /tmp/tmp.MPh2uWNzrg + rm /tmp/tmp.676WVZsqYQ /tmp/tmp.MPh2uWNzrg + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.VypJUYprir ++ mktemp + local LAST_ERR=/tmp/tmp.wFK1ivgQlL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VypJUYprir + cat /tmp/tmp.wFK1ivgQlL + rm /tmp/tmp.VypJUYprir /tmp/tmp.wFK1ivgQlL + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.hBVjcFYinF ++ mktemp + local LAST_ERR=/tmp/tmp.VFoaUB7Vzp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1961/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hBVjcFYinF clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.VFoaUB7Vzp + rm /tmp/tmp.hBVjcFYinF /tmp/tmp.VFoaUB7Vzp + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.IKyBi1Ih5h ++ mktemp + local LAST_ERR=/tmp/tmp.G5hXwDgmMH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.IKyBi1Ih5h + cat /tmp/tmp.G5hXwDgmMH Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.IKyBi1Ih5h + cat /tmp/tmp.G5hXwDgmMH Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from 
server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error 
when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 4 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.IKyBi1Ih5h + cat 
/tmp/tmp.G5hXwDgmMH Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when 
deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io 
"cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 8 + cat /tmp/tmp.IKyBi1Ih5h + cat /tmp/tmp.G5hXwDgmMH Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io 
"cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps 
"cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + rm /tmp/tmp.IKyBi1Ih5h /tmp/tmp.G5hXwDgmMH + return 1 + true + '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace demand-backup-19332 + rm -rf /tmp/tmp.lfGJl3onxJ + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.MwDv4GqcvT + local LAST_OUT=/tmp/tmp.Ku67bSvdXd ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.eO2WwP6GTF + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.SH0TmdNbKB + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace demand-backup-19332 + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator