Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/logs/demand-backup-incremental.log
grep: warning: stray \ before -
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
+ create_infra demand-backup-incremental-31953
+ local ns=demand-backup-incremental-31953
+ [[ 1 == 1 ]]
+ delete_crd
+ desc 'get and delete old CRDs and RBAC'
+ set +o xtrace
-----------------------------------------------------------------------------------
get and delete old CRDs and RBAC
-----------------------------------------------------------------------------------
+ kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/crd.yaml --ignore-not-found --wait=false
++ mktemp
+ local LAST_OUT=/tmp/tmp.zVfkbal5Yr
++ mktemp
+ local LAST_ERR=/tmp/tmp.9BACCNeVEK
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/crd.yaml --ignore-not-found --wait=false
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.zVfkbal5Yr
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted
+ cat /tmp/tmp.9BACCNeVEK
+ rm /tmp/tmp.zVfkbal5Yr /tmp/tmp.9BACCNeVEK
+ return 0
++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/crd.yaml
++ grep -v '\-\-\-'
grep: warning: stray \ before -
grep: warning: stray \ before -
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n demand-backup-incremental-14944 backup-aws-s3 --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 patched
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n demand-backup-incremental-14944 backup-azure-blob --type=merge -p '{"metadata":{"finalizers":[]}}'
Error from server (NotFound): perconaservermongodbbackups.psmdb.percona.com "backup-azure-blob" not found
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n demand-backup-incremental-14944 backup-gcp-cs --type=merge -p '{"metadata":{"finalizers":[]}}'
Error from server (NotFound): perconaservermongodbbackups.psmdb.percona.com "backup-gcp-cs" not found
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n demand-backup-incremental-14944 backup-minio-not-base --type=merge -p '{"metadata":{"finalizers":[]}}'
Error from server (NotFound): perconaservermongodbbackups.psmdb.percona.com "backup-minio-not-base" not found
+ :
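The patch loop above is how delete_crd unsticks custom resources whose deletion would otherwise hang on finalizers: it lists every instance of the CRD across namespaces and merge-patches the finalizers away before the CRD itself is dropped. A minimal standalone sketch of the same pattern, assuming only that kubectl points at the right cluster:

    #!/bin/bash
    # Clear finalizers on every instance of a CRD so deletion cannot hang.
    # $1 - fully qualified CRD name, e.g. perconaservermongodbbackups.psmdb.percona.com
    clear_finalizers() {
      local crd="$1"
      # -o wide prints "<namespace> <name> ..."; grep -v drops the header row.
      kubectl get "$crd" --all-namespaces -o wide \
        | grep -v NAMESPACE \
        | xargs -L 1 sh -xc \
          "kubectl patch $crd -n \$0 \$1 --type=merge -p '{\"metadata\":{\"finalizers\":[]}}'" \
        || :  # tolerate NotFound races, exactly as the trace above does
    }
    clear_finalizers perconaservermongodbbackups.psmdb.percona.com

The NotFound errors in the log are expected: the resources were already deleted between the list and the patch, and the trailing `:` keeps `set -e` from aborting the run.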
+ kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.3koZNAL1dV
++ mktemp
+ local LAST_ERR=/tmp/tmp.48JTRLz3eY
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.3koZNAL1dV
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com condition met
+ cat /tmp/tmp.48JTRLz3eY
+ rm /tmp/tmp.3koZNAL1dV /tmp/tmp.48JTRLz3eY
+ return 0
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.Gb7rt3v7XW
++ mktemp
+ local LAST_ERR=/tmp/tmp.4W8esyvSxY
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.Gb7rt3v7XW
+ cat /tmp/tmp.4W8esyvSxY
+ rm /tmp/tmp.Gb7rt3v7XW /tmp/tmp.4W8esyvSxY
+ return 0
+ for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-')
+ kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
error: the server doesn't have a resource type "perconaservermongodbs"
+ kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbs"
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.C10ZlaJXMD
++ mktemp
+ local LAST_ERR=/tmp/tmp.QavBn8GMxV
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.C10ZlaJXMD
+ cat /tmp/tmp.QavBn8GMxV
+ rm /tmp/tmp.C10ZlaJXMD /tmp/tmp.QavBn8GMxV
+ return 0
+ local rbac_yaml=rbac.yaml
+ '[' -n psmdb-operator ']'
+ rbac_yaml=cw-rbac.yaml
+ kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/cw-rbac.yaml --ignore-not-found
++ mktemp
+ local LAST_OUT=/tmp/tmp.L8E5zUknnn
++ mktemp
+ local LAST_ERR=/tmp/tmp.0D7DPGxRyJ
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/cw-rbac.yaml --ignore-not-found
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.L8E5zUknnn
clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted
clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted
+ cat /tmp/tmp.0D7DPGxRyJ
+ rm /tmp/tmp.L8E5zUknnn /tmp/tmp.0D7DPGxRyJ
+ return 0
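Nearly every kubectl call in this log goes through the harness's kubectl_bin wrapper, which is why each command is bracketed by mktemp, a `seq 0 2` loop, and cat/rm of LAST_OUT/LAST_ERR files. A plausible reconstruction of that wrapper, inferred purely from the trace (the 0s/4s/8s back-off matches the crd/null retries shown below; the upstream source may differ in detail):

    # Retry a kubectl command up to 3 times, capturing stdout/stderr to files.
    kubectl_bin() {
      local LAST_OUT; LAST_OUT=$(mktemp)
      local LAST_ERR; LAST_ERR=$(mktemp)
      local exit_status=0
      local timeout=4
      for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 -a -n "$i" ]; then
          cat "$LAST_OUT"; cat "$LAST_ERR"
          sleep $((timeout * i))   # 0s, then 4s, then 8s between attempts
        else
          break
        fi
      done
      cat "$LAST_OUT"; cat "$LAST_ERR"
      rm "$LAST_OUT" "$LAST_ERR"
      return $exit_status
    }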
+ check_crd_for_deletion PR-2092-7720117d
+ local git_tag=PR-2092-7720117d
++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-2092-7720117d/deploy/crd.yaml
++ /usr/sbin/sed s/---//g
++ yq eval .metadata.name
++ /usr/sbin/sed ':a;N;$!ba;s/\n/ /g'
+ for crd_name in $(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '.metadata.name' | $sed 's/---//g' | $sed ':a;N;$!ba;s/\n/ /g')
++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.iFI77uFtwT
+++ mktemp
++ local LAST_ERR=/tmp/tmp.1JmbeRN2fR
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}'
++ exit_status=1
++ set -e
++ '[' 1 '!=' 0 -a -n 1 ']'
++ cat /tmp/tmp.iFI77uFtwT
++ cat /tmp/tmp.1JmbeRN2fR
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
++ sleep 0
++ for i in $(seq 0 2)
++ set +e
++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}'
++ exit_status=1
++ set -e
++ '[' 1 '!=' 0 -a -n 1 ']'
++ cat /tmp/tmp.iFI77uFtwT
++ cat /tmp/tmp.1JmbeRN2fR
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
++ sleep 4
++ for i in $(seq 0 2)
++ set +e
++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}'
++ exit_status=1
++ set -e
++ '[' 1 '!=' 0 -a -n 1 ']'
++ cat /tmp/tmp.iFI77uFtwT
++ cat /tmp/tmp.1JmbeRN2fR
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
++ sleep 8
++ cat /tmp/tmp.iFI77uFtwT
++ cat /tmp/tmp.1JmbeRN2fR
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
++ rm /tmp/tmp.iFI77uFtwT /tmp/tmp.1JmbeRN2fR
++ return 1
+ [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]]
+ '[' -n psmdb-operator ']'
+ create_namespace psmdb-operator
+ local namespace=psmdb-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ desc 'destroy chaos-mesh'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy chaos-mesh
-----------------------------------------------------------------------------------
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ kubectl get ValidatingWebhookConfiguration
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ awk '{print $1}'
++ grep chaos-mesh
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
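destroy_chaos_mesh above intentionally tolerates an empty cluster: each lookup pipes through grep/awk, and when nothing matches, `kubectl delete` runs with no names, fails with "no name was specified", and the trailing `:` no-op swallows the failure under `set -e`. The same guard can be written so the error never appears; a sketch under that assumption, with the resource kinds taken from the log:

    # Delete chaos-mesh leftovers if any exist; stay silent when none do.
    for kind in MutatingWebhookConfiguration ValidatingWebhookConfiguration clusterrolebinding clusterrole; do
      names=$(kubectl get "$kind" 2>/dev/null | grep chaos-mesh | awk '{print $1}')
      # Only call delete when the lookup produced names, avoiding the
      # "resource(s) were provided, but no name was specified" error.
      [ -n "$names" ] && timeout 30 kubectl delete "$kind" $names || :
    done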
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME'
+ awk '{print$1}'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces psmdb-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces psmdb-operator
-----------------------------------------------------------------------------------
+ xargs kubectl delete ns
+ kubectl_bin delete namespace psmdb-operator --ignore-not-found
++ mktemp
++ mktemp
egrep: warning: egrep is obsolescent; using grep -E
+ local LAST_OUT=/tmp/tmp.J5qCZ7U4bZ
++ mktemp
+ local LAST_OUT=/tmp/tmp.jgjkAFOsut
++ mktemp
+ local LAST_ERR=/tmp/tmp.brVH14UAZC
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.74oWxmV62Q
+ local exit_status=0
+ local timeout=4
+ for i in $(seq 0 2)
+ set +e
+ kubectl get ns
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete namespace psmdb-operator --ignore-not-found
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.J5qCZ7U4bZ
+ cat /tmp/tmp.brVH14UAZC
+ rm /tmp/tmp.J5qCZ7U4bZ /tmp/tmp.brVH14UAZC
+ return 0
namespace "demand-backup-incremental-14944" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.jgjkAFOsut
namespace "psmdb-operator" deleted
+ cat /tmp/tmp.74oWxmV62Q
+ rm /tmp/tmp.jgjkAFOsut /tmp/tmp.74oWxmV62Q
+ return 0
+ kubectl_bin wait --for=delete namespace psmdb-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.NMNSfI4Vz1
++ mktemp
+ local LAST_ERR=/tmp/tmp.cLuls4PddL
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete namespace psmdb-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.NMNSfI4Vz1
+ cat /tmp/tmp.cLuls4PddL
+ rm /tmp/tmp.NMNSfI4Vz1 /tmp/tmp.cLuls4PddL
+ return 0
+ desc 'create namespace psmdb-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace psmdb-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace psmdb-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.T1DQEzBBsA
++ mktemp
+ local LAST_ERR=/tmp/tmp.9XnR0P0jTL
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl create namespace psmdb-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.T1DQEzBBsA
namespace/psmdb-operator created
+ cat /tmp/tmp.9XnR0P0jTL
+ rm /tmp/tmp.T1DQEzBBsA /tmp/tmp.9XnR0P0jTL
+ return 0
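The namespace sweep above builds its kill list by filtering `kubectl get ns` through a pattern that protects system and operator namespaces, then feeds the survivors to `xargs kubectl delete ns`; note that egrep itself now warns it is obsolescent, so `grep -E` is the safer spelling. The same sweep, standalone, with the exclusion list copied from the log:

    # Delete all namespaces except system ones, the operator namespace,
    # and anything already Terminating. grep -E replaces the obsolescent egrep.
    kubectl get ns \
      | grep -E -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' \
      | awk '{print $1}' \
      | xargs -r kubectl delete ns   # -r (GNU): skip the call when the list is empty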
+ set_kube_ctx psmdb-operator
+ local namespace=psmdb-operator
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.nwqIQ5vDAU
+++ mktemp
++ local LAST_ERR=/tmp/tmp.lDY8nKjcmb
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.nwqIQ5vDAU
++ cat /tmp/tmp.lDY8nKjcmb
++ rm /tmp/tmp.nwqIQ5vDAU /tmp/tmp.lDY8nKjcmb
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2092-7720117d-2-cluster1 --namespace=psmdb-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.CZXX6PqUDp
++ mktemp
+ local LAST_ERR=/tmp/tmp.EwrtEQRjhh
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2092-7720117d-2-cluster1 --namespace=psmdb-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.CZXX6PqUDp
Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2092-7720117d-2-cluster1" modified.
+ cat /tmp/tmp.EwrtEQRjhh
+ rm /tmp/tmp.CZXX6PqUDp /tmp/tmp.EwrtEQRjhh
+ return 0
+ deploy_operator
+ desc 'start PSMDB operator: perconalab/percona-server-mongodb-operator:PR-2092-7720117d'
+ set +o xtrace
-----------------------------------------------------------------------------------
start PSMDB operator: perconalab/percona-server-mongodb-operator:PR-2092-7720117d
-----------------------------------------------------------------------------------
+ local cr_file
+ '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/crd.yaml ']'
+ cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/crd.yaml
+ kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/crd.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.0Y831wz136
++ mktemp
+ local LAST_ERR=/tmp/tmp.obDhARV1yt
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/crd.yaml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.0Y831wz136
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied
+ cat /tmp/tmp.obDhARV1yt
+ rm /tmp/tmp.0Y831wz136 /tmp/tmp.obDhARV1yt
+ return 0
+ '[' -n psmdb-operator ']'
+ apply_rbac cw-rbac
+ local operator_namespace=psmdb-operator
+ local rbac=cw-rbac
+ cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/cw-rbac.yaml
+ sed -e 's^namespace: .*^namespace: psmdb-operator^'
+ kubectl_bin apply -n psmdb-operator -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.1RBCqG9IxB
++ mktemp
+ local LAST_ERR=/tmp/tmp.SqxFQFFVYe
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -n psmdb-operator -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.1RBCqG9IxB
clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created
serviceaccount/percona-server-mongodb-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created
+ cat /tmp/tmp.SqxFQFFVYe
+ rm /tmp/tmp.1RBCqG9IxB /tmp/tmp.SqxFQFFVYe
+ return 0
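deploy_operator next rewrites deploy/cw-operator.yaml in-flight rather than editing the file: a single yq expression pins the operator image to the PR build, forces DISABLE_TELEMETRY to "true", raises LOG_LEVEL to DEBUG, and the result is piped straight into kubectl apply. The shape of that pipeline, shown standalone with the image tag and path from the log:

    # Patch the operator Deployment manifest on the fly and apply it.
    yq eval '
      (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-2092-7720117d") |
      ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") |
      ((.. | select(.[] == "LOG_LEVEL")) |= .value="DEBUG")
    ' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/cw-operator.yaml \
      | kubectl apply -n psmdb-operator -f -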
+ yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-2092-7720117d") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. | select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/cw-operator.yaml
+ kubectl_bin apply -n psmdb-operator -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.x8gsNR7Iom
++ mktemp
+ local LAST_ERR=/tmp/tmp.VrZ76OMsQx
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -n psmdb-operator -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.x8gsNR7Iom
deployment.apps/percona-server-mongodb-operator created
+ cat /tmp/tmp.VrZ76OMsQx
+ rm /tmp/tmp.x8gsNR7Iom /tmp/tmp.VrZ76OMsQx
+ return 0
+ sleep 20
++ get_operator_pod
++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.oJH0AadB7p
+++ mktemp
++ local LAST_ERR=/tmp/tmp.WHokfEYXC4
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.oJH0AadB7p
++ cat /tmp/tmp.WHokfEYXC4
++ rm /tmp/tmp.oJH0AadB7p /tmp/tmp.WHokfEYXC4
++ return 0
+ wait_operator_pod percona-server-mongodb-operator-666b579f95-jjf77
+ local pod=percona-server-mongodb-operator-666b579f95-jjf77
+ set +o xtrace
waiting for pod/percona-server-mongodb-operator-666b579f95-jjf77 to be ready.OK
+ echo 'Print operator info from log'
Print operator info from log
+ grep 'Manager starting up'
++ get_operator_pod
++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.6Rc0vmM8gg
+++ mktemp
++ local LAST_ERR=/tmp/tmp.hu0HnZQuER
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.6Rc0vmM8gg
++ cat /tmp/tmp.hu0HnZQuER
++ rm /tmp/tmp.6Rc0vmM8gg /tmp/tmp.hu0HnZQuER
++ return 0
+ kubectl_bin logs -n psmdb-operator percona-server-mongodb-operator-666b579f95-jjf77
++ mktemp
+ local LAST_OUT=/tmp/tmp.Ds0XcptBOM
++ mktemp
+ local LAST_ERR=/tmp/tmp.5mLLfxMJfG
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl logs -n psmdb-operator percona-server-mongodb-operator-666b579f95-jjf77
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.Ds0XcptBOM
+ cat /tmp/tmp.5mLLfxMJfG
+ rm /tmp/tmp.Ds0XcptBOM /tmp/tmp.5mLLfxMJfG
+ return 0
2025-10-18T12:11:38.659Z INFO setup Manager starting up {"gitCommit": "7720117d4fda64c4e5b68831361fb7ab358c7057", "gitBranch": "PR-2092-7720117d", "buildTime": "", "goVersion": "go1.25.3", "os": "linux", "arch": "amd64"}
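With the Deployment applied, the harness resolves the operator pod by label selector and a jsonpath template, then polls it until Ready before scraping the startup line from its logs. A compact sketch of that readiness dance; the selector and namespace come from the log, while the explicit `kubectl wait` and its timeout stand in for the harness's own polling loop:

    # Find the operator pod by label and block until it is Ready.
    pod=$(kubectl get pods -n psmdb-operator \
      --selector=name=percona-server-mongodb-operator \
      -o 'jsonpath={.items[].metadata.name}')
    kubectl wait -n psmdb-operator --for=condition=Ready "pod/$pod" --timeout=120s
    # Confirm which build is actually running inside the cluster.
    kubectl logs -n psmdb-operator "$pod" | grep 'Manager starting up'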
+ create_namespace demand-backup-incremental-31953
+ local namespace=demand-backup-incremental-31953
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ desc 'destroy chaos-mesh'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy chaos-mesh
-----------------------------------------------------------------------------------
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME'
+ awk '{print$1}'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces demand-backup-incremental-31953'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces demand-backup-incremental-31953
-----------------------------------------------------------------------------------
++ mktemp
+ kubectl_bin delete namespace demand-backup-incremental-31953 --ignore-not-found
+ xargs kubectl delete ns
++ mktemp
+ local LAST_OUT=/tmp/tmp.r1S6fSOd8d
++ mktemp
egrep: warning: egrep is obsolescent; using grep -E
+ local LAST_OUT=/tmp/tmp.0z8BH4qa3m
++ mktemp
+ local LAST_ERR=/tmp/tmp.bLMhPE7DS2
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.n8wLne0YaI
+ local exit_status=0
+ local timeout=4
+ for i in $(seq 0 2)
+ set +e
+ kubectl get ns
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete namespace demand-backup-incremental-31953 --ignore-not-found
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.r1S6fSOd8d
+ cat /tmp/tmp.bLMhPE7DS2
+ rm /tmp/tmp.r1S6fSOd8d /tmp/tmp.bLMhPE7DS2
+ return 0
error: resource(s) were provided, but no name was specified
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.0z8BH4qa3m
+ cat /tmp/tmp.n8wLne0YaI
+ rm /tmp/tmp.0z8BH4qa3m /tmp/tmp.n8wLne0YaI
+ return 0
+ kubectl_bin wait --for=delete namespace demand-backup-incremental-31953
++ mktemp
+ local LAST_OUT=/tmp/tmp.7HftGeSEdD
++ mktemp
+ local LAST_ERR=/tmp/tmp.OPj72NGNUk
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl wait --for=delete namespace demand-backup-incremental-31953
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.7HftGeSEdD
+ cat /tmp/tmp.OPj72NGNUk
+ rm /tmp/tmp.7HftGeSEdD /tmp/tmp.OPj72NGNUk
+ return 0
+ desc 'create namespace demand-backup-incremental-31953'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace demand-backup-incremental-31953
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace demand-backup-incremental-31953
++ mktemp
+ local LAST_OUT=/tmp/tmp.I517YHHVGg
++ mktemp
+ local LAST_ERR=/tmp/tmp.88JCdNw9EY
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl create namespace demand-backup-incremental-31953
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.I517YHHVGg
namespace/demand-backup-incremental-31953 created
+ cat /tmp/tmp.88JCdNw9EY
+ rm /tmp/tmp.I517YHHVGg /tmp/tmp.88JCdNw9EY
+ return 0
+ set_kube_ctx demand-backup-incremental-31953
+ local namespace=demand-backup-incremental-31953
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.fGnpPu7wvI
+++ mktemp
++ local LAST_ERR=/tmp/tmp.AXiXvhs9PS
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.fGnpPu7wvI
++ cat /tmp/tmp.AXiXvhs9PS
++ rm /tmp/tmp.fGnpPu7wvI /tmp/tmp.AXiXvhs9PS
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2092-7720117d-2-cluster1 --namespace=demand-backup-incremental-31953
++ mktemp
+ local LAST_OUT=/tmp/tmp.71FdDqjHFa
++ mktemp
+ local LAST_ERR=/tmp/tmp.xKbN6pctjO
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2092-7720117d-2-cluster1 --namespace=demand-backup-incremental-31953
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.71FdDqjHFa
Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2092-7720117d-2-cluster1" modified.
+ cat /tmp/tmp.xKbN6pctjO
+ rm /tmp/tmp.71FdDqjHFa /tmp/tmp.xKbN6pctjO
+ return 0
+ deploy_minio
+ desc 'install Minio'
+ set +o xtrace
-----------------------------------------------------------------------------------
install Minio
-----------------------------------------------------------------------------------
+ helm uninstall minio-service
Error: uninstall: Release not loaded: minio-service: release: not found
+ :
+ helm repo remove minio
"minio" has been removed from your repositories
+ helm repo add minio https://charts.min.io/
"minio" has been added to your repositories
+ retry 10 60 helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio
+ local max=10
+ local delay=60
+ shift 2
+ local n=1
+ helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio
NAME: minio-service
LAST DEPLOYED: Sat Oct 18 12:12:17 2025
NAMESPACE: demand-backup-incremental-31953
STATUS: deployed
REVISION: 1
TEST SUITE: None
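The MinIO install is wrapped in `retry 10 60 helm install ...`; from the traced locals (max=10, delay=60, shift 2, n=1) the helper is evidently a fixed-interval retry loop. A plausible reconstruction, hedged since only its first, successful iteration appears in the log:

    # Run a command until it succeeds, up to $1 attempts, sleeping $2 seconds between tries.
    retry() {
      local max=$1
      local delay=$2
      shift 2            # everything left is the command to run
      local n=1
      until "$@"; do
        if [ "$n" -ge "$max" ]; then
          echo "retry: command failed after $max attempts" >&2
          return 1
        fi
        sleep "$delay"
        n=$((n + 1))
      done
    }
    # usage, as in the log:
    # retry 10 60 helm install minio-service --version 5.4.0 ... minio/minio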
NOTES:
MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.demand-backup-incremental-31953.cluster.local

To access MinIO from localhost, run the below commands:
1. export POD_NAME=$(kubectl get pods --namespace demand-backup-incremental-31953 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}")
2. kubectl port-forward $POD_NAME 9000 --namespace demand-backup-incremental-31953
Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/

You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client:
1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart
2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace demand-backup-incremental-31953 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace demand-backup-incremental-31953 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000
3. mc ls minio-service-local
++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.LBA6lzUmpu
+++ mktemp
++ local LAST_ERR=/tmp/tmp.GUDD3aYH9B
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.LBA6lzUmpu
++ cat /tmp/tmp.GUDD3aYH9B
++ rm /tmp/tmp.LBA6lzUmpu /tmp/tmp.GUDD3aYH9B
++ return 0
+ MINIO_POD=minio-service-d9589b474-r4qtq
+ wait_pod minio-service-d9589b474-r4qtq
+ local pod=minio-service-d9589b474-r4qtq
+ set +o xtrace
waiting for pod/minio-service-d9589b474-r4qtq to be ready.OK
+ '[' -n psmdb-operator ']'
+ kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.demand-backup-incremental-31953.svc.cluster.local --tcp=9000
++ mktemp
+ local LAST_OUT=/tmp/tmp.SMbWFC9rTZ
++ mktemp
+ local LAST_ERR=/tmp/tmp.7pwsW24oOA
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl create svc -n psmdb-operator externalname minio-service --external-name=minio-service.demand-backup-incremental-31953.svc.cluster.local --tcp=9000
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.SMbWFC9rTZ
service/minio-service created
+ cat /tmp/tmp.7pwsW24oOA
+ rm /tmp/tmp.SMbWFC9rTZ /tmp/tmp.7pwsW24oOA
+ return 0
+ create_minio_bucket operator-testing
+ local bucket=operator-testing
+ kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'
++ mktemp
+ local LAST_OUT=/tmp/tmp.SkSfz1yc6y
++ mktemp
+ local LAST_ERR=/tmp/tmp.MHC2fUpBsj
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.SkSfz1yc6y
make_bucket: operator-testing
pod "aws-cli" deleted from demand-backup-incremental-31953 namespace
+ cat /tmp/tmp.MHC2fUpBsj
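Because MinIO is only reachable inside the cluster, the bucket is created from a throwaway aws-cli pod pointed at the in-cluster endpoint; the credentials match the users[0] values passed to the Helm chart above. The pattern, standalone:

    # Create an S3 bucket on the in-cluster MinIO from a one-shot pod.
    kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- \
      bash -c 'AWS_ACCESS_KEY_ID=some-access-key \
               AWS_SECRET_ACCESS_KEY=some-secret-key \
               AWS_DEFAULT_REGION=us-east-1 \
               /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'

The "couldn't attach" warning that follows in the log is benign: the pod finished before kubectl could attach, so kubectl fell back to streaming its logs.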
All commands and output from this session will be recorded in container logs, including credentials and sensitive information passed through the command prompt.
If you don't see a command prompt, try pressing enter.
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-incremental-31953
+ rm /tmp/tmp.SkSfz1yc6y /tmp/tmp.MHC2fUpBsj
+ return 0
+ apply_s3_storage_secrets
+ desc 'create secrets for cloud storages'
+ set +o xtrace
-----------------------------------------------------------------------------------
create secrets for cloud storages
-----------------------------------------------------------------------------------
+ '[' -z '' ']'
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/conf/cloud-secret.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.SGGqnCwYcn
++ mktemp
+ local LAST_ERR=/tmp/tmp.guCcoBItBD
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/conf/cloud-secret.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.SGGqnCwYcn
secret/minio-secret created
secret/aws-s3-secret created
secret/gcp-cs-secret created
secret/azure-secret created
secret/gcp-cs-sa-key-secret created
+ cat /tmp/tmp.guCcoBItBD
+ rm /tmp/tmp.SGGqnCwYcn /tmp/tmp.guCcoBItBD
+ return 0
+ desc 'Testing on not sharded cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
Testing on not sharded cluster
-----------------------------------------------------------------------------------
+ log 'Creating PSMDB cluster'
+ set +o xtrace
[2025-10-18T12:12:57+0000] Creating PSMDB cluster
+ cluster=some-name
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/secrets.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.BYZO9nk3rd
++ mktemp
+ local LAST_ERR=/tmp/tmp.C4JdJ7rz6V
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/secrets.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.BYZO9nk3rd
secret/some-users created
+ cat /tmp/tmp.C4JdJ7rz6V
+ rm /tmp/tmp.BYZO9nk3rd /tmp/tmp.C4JdJ7rz6V
+ return 0
+ apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/some-name.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/some-name.yml
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/some-name.yml
++ mktemp
+ yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"'
+ yq eval '(.spec | select(has("pmm"))).pmm.image = "percona/pmm-client:2.44.1-1"'
+ yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-2092-7720117d"'
+ yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"'
+ yq eval '.spec.upgradeOptions.apply="Never"'
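apply_cluster pipes the CR through a chain of yq edits that fill in image fields only where the manifest actually has them, note the select(.image == null) and select(has(...)) guards, and pin the PMM, init, and backup images plus upgradeOptions.apply=Never. The chain, condensed into one runnable pipeline with paths and tags from the log:

    # Fill defaults into the PSMDB CR and apply it.
    cat e2e-tests/demand-backup-incremental/conf/some-name.yml \
      | yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' \
      | yq eval '(.spec | select(has("pmm"))).pmm.image = "percona/pmm-client:2.44.1-1"' \
      | yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-2092-7720117d"' \
      | yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' \
      | yq eval '.spec.upgradeOptions.apply="Never"' \
      | kubectl apply -f -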
+ local LAST_OUT=/tmp/tmp.rjtXqceXYj
++ mktemp
+ local LAST_ERR=/tmp/tmp.FNvuCNh8Fb
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.rjtXqceXYj
perconaservermongodb.psmdb.percona.com/some-name created
+ cat /tmp/tmp.FNvuCNh8Fb
+ rm /tmp/tmp.rjtXqceXYj /tmp/tmp.FNvuCNh8Fb
+ return 0
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/conf/client_with_tls.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.n7QvMbXoyj
++ mktemp
+ local LAST_ERR=/tmp/tmp.Az8itIDNbr
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/conf/client_with_tls.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.n7QvMbXoyj
deployment.apps/psmdb-client created
+ cat /tmp/tmp.Az8itIDNbr
+ rm /tmp/tmp.n7QvMbXoyj /tmp/tmp.Az8itIDNbr
+ return 0
+ log 'check if all pods started'
+ set +o xtrace
[2025-10-18T12:13:03+0000] check if all pods started
+ wait_for_running some-name-rs0 3
+ local name=some-name-rs0
+ let last_pod=2
+ local check_cluster_readyness=true
+ set_debug
+ [[ 1 == 1 ]]
+ set -o xtrace
+ local rs_name=rs0
+ local cluster_name=some-name
++ seq 0 2
+ for i in $(seq 0 $last_pod)
+ [[ 0 -eq 2 ]]
+ wait_pod some-name-rs0-0
+ local pod=some-name-rs0-0
+ set +o xtrace
waiting for pod/some-name-rs0-0 to be ready........OK
+ for i in $(seq 0 $last_pod)
+ [[ 1 -eq 2 ]]
+ wait_pod some-name-rs0-1
+ local pod=some-name-rs0-1
+ set +o xtrace
waiting for pod/some-name-rs0-1 to be ready.....OK
+ for i in $(seq 0 $last_pod)
+ [[ 2 -eq 2 ]]
++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.OI1KyevgJQ
+++ mktemp
++ local LAST_ERR=/tmp/tmp.wbczjowedZ
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.OI1KyevgJQ
++ cat /tmp/tmp.wbczjowedZ
++ rm /tmp/tmp.OI1KyevgJQ /tmp/tmp.wbczjowedZ
++ return 0
+ [[ '' == \t\r\u\e ]]
+ wait_pod some-name-rs0-2
+ local pod=some-name-rs0-2
+ set +o xtrace
waiting for pod/some-name-rs0-2 to be ready........OK
++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.zyMQWcoZhO
+++ mktemp
++ local LAST_ERR=/tmp/tmp.vIurByLSWF
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.zyMQWcoZhO
++ cat /tmp/tmp.vIurByLSWF
++ rm /tmp/tmp.zyMQWcoZhO /tmp/tmp.vIurByLSWF
++ return 0
+ [[ '' == \t\r\u\e ]]
++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.9AStlAcYPZ
+++ mktemp
++ local LAST_ERR=/tmp/tmp.r4FT3GXcPc
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.9AStlAcYPZ
++ cat /tmp/tmp.r4FT3GXcPc
++ rm /tmp/tmp.9AStlAcYPZ /tmp/tmp.r4FT3GXcPc
++ return 0
+ [[ '' == \t\r\u\e ]]
+ sleep 10
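wait_for_running decides whether to wait for extra members by reading per-replset flags straight off the CR with filtered jsonpath expressions; an empty result, as seen above, means the feature is disabled. The query shape, standalone:

    # Is the arbiter enabled for replset rs0 of cluster some-name?
    kubectl get psmdb some-name \
      -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}'
    # The same filter works for the other optional members:
    kubectl get psmdb some-name \
      -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}'
    kubectl get psmdb some-name \
      -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}'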
+ [[ true == \t\r\u\e ]]
+ set +x
Waiting for cluster readyness
+ wait_cluster_consistency some-name
+ local cluster_name=some-name
+ local wait_time=32
+ retry=0
+ sleep 7
+ echo -n 'waiting for cluster readyness'
waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.9XajDeQC5O
+++ mktemp
++ local LAST_ERR=/tmp/tmp.YjH8fF8X76
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.9XajDeQC5O
++ cat /tmp/tmp.YjH8fF8X76
++ rm /tmp/tmp.9XajDeQC5O /tmp/tmp.YjH8fF8X76
++ return 0
+ [[ ready == \r\e\a\d\y ]]
+ echo .OK
.OK
+ log 'writing test data'
+ set +o xtrace
[2025-10-18T12:14:31+0000] writing test data
+ run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-rs0.demand-backup-incremental-31953
+ local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})'
+ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-incremental-31953
+ local driver=mongodb+srv
+ local suffix=.svc.cluster.local
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.ALrKI20QZS
+++ mktemp
++ local LAST_ERR=/tmp/tmp.Wdw3XRv6BW
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.ALrKI20QZS
++ cat /tmp/tmp.Wdw3XRv6BW
++ rm /tmp/tmp.ALrKI20QZS /tmp/tmp.Wdw3XRv6BW
++ return 0
+ local client_container=psmdb-client-8f86f7874-85dcj
+ local mongo_flag=
+ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-incremental-31953 == *cfg* ]]
+ replica_set=rs0
+ kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.6Ie98amvRd
++ mktemp
+ local LAST_ERR=/tmp/tmp.lCFm7hMyDe
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.6Ie98amvRd
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false
Implicit session: session { "id" : UUID("9ba31d2d-aa22-4a9a-8d2e-a1c63e5f07c1") }
Percona Server for MongoDB server version: v7.0.24-13
WARNING: shell and server versions do not match
Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] }
bye
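run_mongo executes shell snippets by printf-ing them into a mongo shell inside the long-running psmdb-client pod, building an SRV connection string from the user, password, and replset service name. Reduced to its essentials, with the client pod resolved by label as the harness does; the probe command here is an arbitrary stand-in, not the harness's own:

    # Run a mongo snippet against the cluster via the client pod.
    client=$(kubectl get pods --selector=name=psmdb-client \
      -o 'jsonpath={.items[].metadata.name}')
    uri='mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false&replicaSet=rs0'
    kubectl exec "$client" -- bash -c "printf 'db.getSiblingDB(\"myApp\").stats()\n' | mongo '$uri'"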
+ cat /tmp/tmp.lCFm7hMyDe
+ rm /tmp/tmp.6Ie98amvRd /tmp/tmp.lCFm7hMyDe
+ return 0
+ sleep 1
+ run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.demand-backup-incremental-31953
+ local 'command=use myApp\n db.test.insert({ x: 100500 })'
+ local uri=myApp:myPass@some-name-rs0.demand-backup-incremental-31953
+ local driver=mongodb+srv
+ local suffix=.svc.cluster.local
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.5oylimgYqA
+++ mktemp
++ local LAST_ERR=/tmp/tmp.udHTlSIyaJ
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.5oylimgYqA
++ cat /tmp/tmp.udHTlSIyaJ
++ rm /tmp/tmp.5oylimgYqA /tmp/tmp.udHTlSIyaJ
++ return 0
+ local client_container=psmdb-client-8f86f7874-85dcj
+ local mongo_flag=
+ [[ myApp:myPass@some-name-rs0.demand-backup-incremental-31953 == *cfg* ]]
+ replica_set=rs0
+ kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.chjeVo9GFa
++ mktemp
+ local LAST_ERR=/tmp/tmp.NHXMMTME6C
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.chjeVo9GFa
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false
Implicit session: session { "id" : UUID("0bec75de-1ec6-4272-bc14-af4827003b15") }
Percona Server for MongoDB server version: v7.0.24-13
WARNING: shell and server versions do not match
switched to db myApp
WriteResult({ "nInserted" : 1 })
bye
+ cat /tmp/tmp.NHXMMTME6C
+ rm /tmp/tmp.chjeVo9GFa /tmp/tmp.NHXMMTME6C
+ return 0
+ sleep 5
+ compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953
+ local command=find
+ local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953
+ local postfix=
+ local suffix=
+ local database=myApp
+ local collection=test
+ local sort=
+ local tls=false
+ local 'full_command=db.test.find()'
+ [[ -n '' ]]
+ log 'running db.test.find() in myApp'
+ set +o xtrace
[2025-10-18T12:14:44+0000] running db.test.find() in myApp
+ [[ false == \t\r\u\e ]]
+ mongo_command=run_mongo
+ run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 mongodb ''
+ local 'command=use myApp\n db.test.find()'
+ local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service'
+ /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
egrep: warning: egrep is obsolescent; using grep -E
++ local LAST_OUT=/tmp/tmp.5RgCL2UBZE
+++ mktemp
++ local LAST_ERR=/tmp/tmp.qUnWvVafnw
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.5RgCL2UBZE
++ cat /tmp/tmp.qUnWvVafnw
++ rm /tmp/tmp.5RgCL2UBZE /tmp/tmp.qUnWvVafnw
++ return 0
+ local client_container=psmdb-client-8f86f7874-85dcj
+ local mongo_flag=
+ [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]]
+ replica_set=rs0
+ kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.RHEToj7jrb
++ mktemp
+ local LAST_ERR=/tmp/tmp.1jQylenkEn
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.RHEToj7jrb
+ cat /tmp/tmp.1jQylenkEn
+ rm /tmp/tmp.RHEToj7jrb /tmp/tmp.1jQylenkEn
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find
+ compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953
+ local command=find
+ local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953
+ local postfix=
+ local suffix=
+ local database=myApp
+ local collection=test
+ local sort=
+ local tls=false
+ local 'full_command=db.test.find()'
+ [[ -n '' ]]
+ log 'running db.test.find() in myApp'
+ set +o xtrace
[2025-10-18T12:14:47+0000] running db.test.find() in myApp
+ [[ false == \t\r\u\e ]]
+ mongo_command=run_mongo
+ run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 mongodb ''
+ local 'command=use myApp\n db.test.find()'
+ local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service'
+ /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
egrep: warning: egrep is obsolescent; using grep -E
++ local LAST_OUT=/tmp/tmp.4hI49ZiSob
+++ mktemp
++ local LAST_ERR=/tmp/tmp.GRkNFy9zZU
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.4hI49ZiSob
++ cat /tmp/tmp.GRkNFy9zZU
++ rm /tmp/tmp.4hI49ZiSob /tmp/tmp.GRkNFy9zZU
++ return 0
+ local client_container=psmdb-client-8f86f7874-85dcj
+ local mongo_flag=
+ [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]]
+ replica_set=rs0
+ kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.csgJQ4Gb6G
++ mktemp
+ local LAST_ERR=/tmp/tmp.oyVA4Dq7Ax
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.csgJQ4Gb6G
+ cat /tmp/tmp.oyVA4Dq7Ax
+ rm /tmp/tmp.csgJQ4Gb6G /tmp/tmp.oyVA4Dq7Ax
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find
+ compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953
+ local command=find
+ local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953
+ local postfix=
+ local suffix=
+ local database=myApp
+ local collection=test
+ local sort=
+ local tls=false
+ local 'full_command=db.test.find()'
+ [[ -n '' ]]
+ log 'running db.test.find() in myApp'
+ set +o xtrace
[2025-10-18T12:14:49+0000] running db.test.find() in myApp
+ [[ false == \t\r\u\e ]]
+ mongo_command=run_mongo
+ run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 mongodb ''
+ local 'command=use myApp\n db.test.find()'
+ local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service'
+ /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
egrep: warning: egrep is obsolescent; using grep -E
++ local LAST_OUT=/tmp/tmp.NpUwHtMmTo
+++ mktemp
++ local LAST_ERR=/tmp/tmp.l4Y5utWQQ8
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.NpUwHtMmTo
++ cat /tmp/tmp.l4Y5utWQQ8
++ rm /tmp/tmp.NpUwHtMmTo /tmp/tmp.l4Y5utWQQ8
++ return 0
+ local client_container=psmdb-client-8f86f7874-85dcj
+ local mongo_flag=
+ [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]]
+ replica_set=rs0
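compare_mongo_cmd only works because the shell output is normalized before diffing: grep -E -v (the obsolescent egrep spelling in the harness) strips connection noise and version-skew warnings, and sed blanks ObjectIds and pod ordinals so the result is stable across runs before being diffed against compare/find.json. The filter on its own, as a sketch:

    # Normalize mongo shell output so it diffs cleanly against a golden file.
    normalize() {
      grep -E -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Started a new thread for the timer service' \
        | sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
    }
    # usage: run_mongo ... | normalize > "$tmpdir/find"
    #        diff -u compare/find.json "$tmpdir/find"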
+ kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
++ mktemp
+ local LAST_OUT=/tmp/tmp.FmAD3jBqXB
++ mktemp
+ local LAST_ERR=/tmp/tmp.ehxfKaB9dj
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.FmAD3jBqXB
+ cat /tmp/tmp.ehxfKaB9dj
+ rm /tmp/tmp.FmAD3jBqXB /tmp/tmp.ehxfKaB9dj
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find
+ log 'running backups'
+ set +o xtrace
[2025-10-18T12:14:52+0000] running backups
+ '[' -z '' ']'
+ backup_name_aws=backup-aws-s3
+ backup_name_gcp=backup-gcp-cs
+ backup_name_azure=backup-azure-blob
+ run_backup aws-s3 backup-aws-s3
+ local storage=aws-s3
+ local backup_name=backup-aws-s3
+ local base=true
+ local backup_type=incremental
+ [[ true == \t\r\u\e ]]
+ backup_type=incremental-base
+ yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/backup.yml
+ /usr/sbin/sed -e 's/name:/name: backup-aws-s3/'
+ /usr/sbin/sed -e 's/storageName:/storageName: aws-s3/'
+ yq '.spec.type="incremental-base"'
+ kubectl_bin apply -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.htRKB0T20A
++ mktemp
+ local LAST_ERR=/tmp/tmp.P8BQk3lxFl
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.htRKB0T20A
perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created
+ cat /tmp/tmp.P8BQk3lxFl
+ rm /tmp/tmp.htRKB0T20A /tmp/tmp.P8BQk3lxFl
+ return 0
+ run_backup gcp-cs backup-gcp-cs
+ local storage=gcp-cs
+ local backup_name=backup-gcp-cs
+ local base=true
+ local backup_type=incremental
+ [[ true == \t\r\u\e ]]
+ backup_type=incremental-base
+ yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/backup.yml
+ /usr/sbin/sed -e 's/name:/name: backup-gcp-cs/'
+ /usr/sbin/sed -e 's/storageName:/storageName: gcp-cs/'
+ yq '.spec.type="incremental-base"'
+ kubectl_bin apply -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.joKRMv9Zkm
++ mktemp
+ local LAST_ERR=/tmp/tmp.BIb8gro6TG
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.joKRMv9Zkm
perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs created
+ cat /tmp/tmp.BIb8gro6TG
+ rm /tmp/tmp.joKRMv9Zkm /tmp/tmp.BIb8gro6TG
+ return 0
+ run_backup azure-blob backup-azure-blob
+ local storage=azure-blob
+ local backup_name=backup-azure-blob
+ local base=true
+ local backup_type=incremental
+ [[ true == \t\r\u\e ]]
+ backup_type=incremental-base
+ yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/backup.yml
+ /usr/sbin/sed -e 's/name:/name: backup-azure-blob/'
+ /usr/sbin/sed -e 's/storageName:/storageName: azure-blob/'
+ yq '.spec.type="incremental-base"'
+ kubectl_bin apply -f -
++ mktemp
+ local LAST_OUT=/tmp/tmp.DArWXxFyUK
++ mktemp
+ local LAST_ERR=/tmp/tmp.LXviRfIFna
+ local exit_status=0
+ local timeout=4
++ seq 0 2
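run_backup stamps out each PerconaServerMongoDBBackup CR from one template: sed fills in the name and storageName, yq forces .spec.type to incremental-base for the first (base) backup against each storage, and the result goes straight to kubectl apply. The same templating, standalone, with the template path from the log:

    # Render and submit a backup CR for a given storage.
    storage=aws-s3
    backup_name=backup-aws-s3
    yq e2e-tests/demand-backup-incremental/conf/backup.yml \
      | sed -e "s/name:/name: $backup_name/" \
      | sed -e "s/storageName:/storageName: $storage/" \
      | yq '.spec.type="incremental-base"' \
      | kubectl apply -f -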
+ for i in $(seq 0 2)
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.DArWXxFyUK
perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created
+ cat /tmp/tmp.LXviRfIFna
+ rm /tmp/tmp.DArWXxFyUK /tmp/tmp.LXviRfIFna
+ return 0
+ wait_backup backup-aws-s3
+ local backup_name=backup-aws-s3
+ local target_state=ready
+ set +o xtrace
waiting for backup-aws-s3 to reach ready state..............OK
+ check_backup_in_storage backup-aws-s3 s3 rs0
+ local backup=backup-aws-s3
+ local storage_type=s3
+ local replset=rs0
+ local file=filelist.pbm
+ local endpoint
+ case ${storage_type} in
+ endpoint=s3.amazonaws.com
++ get_backup_dest backup-aws-s3
++ /usr/sbin/sed 's|https://engk8soperators.blob.core.windows.net/||'
++ local backup_name=backup-aws-s3
++ kubectl_bin get psmdb-backup backup-aws-s3 -o 'jsonpath={.status.destination}'
++ sed -e 's/.json$//'
++ sed 's|s3://||'
+++ mktemp
++ sed 's|azure://||'
++ sed 's|gs://||'
++ local LAST_OUT=/tmp/tmp.P6YHF0QmmA
+++ mktemp
++ local LAST_ERR=/tmp/tmp.hW9qBUcxNz
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb-backup backup-aws-s3 -o 'jsonpath={.status.destination}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.P6YHF0QmmA
++ cat /tmp/tmp.hW9qBUcxNz
++ rm /tmp/tmp.P6YHF0QmmA /tmp/tmp.hW9qBUcxNz
++ return 0
+ backup_dest=operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:14:55Z
+ [[ s3 == \m\i\n\i\o ]]
+ local url=https://s3.amazonaws.com/operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:14:55Z/rs0/filelist.pbm
+ log 'checking if https://s3.amazonaws.com/operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:14:55Z/rs0/filelist.pbm exists'
+ set +o xtrace
[2025-10-18T12:15:28+0000] checking if https://s3.amazonaws.com/operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:14:55Z/rs0/filelist.pbm exists
+ curl --fail --head https://s3.amazonaws.com/operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:14:55Z/rs0/filelist.pbm
% Total % Received % Xferd Average Speed Time Time Time Current
Dload Upload Total Spent Left Speed
0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0
0 13661 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0
HTTP/1.1 200 OK
x-amz-id-2: lJAV/cQdQToG547pkRpiHGbPmoORlWKN8DV9v3qPrDxXTLoMdiDKSwnhdDlw7mlhmqxd5V61ZWwiOPdwwJKia+fkU8zGqnub4N4xJrUZFTc=
x-amz-request-id: 534WA83D8MKPDP7P
Date: Sat, 18 Oct 2025 12:15:29 GMT
Last-Modified: Sat, 18 Oct 2025 12:15:22 GMT
x-amz-expiration: expiry-date="Mon, 20 Oct 2025 00:00:00 GMT", rule-id="1 Days Cleanup"
ETag: "8fdff3c4edc8db27eb0949a99cc0af9d"
x-amz-server-side-encryption: AES256
Accept-Ranges: bytes
Content-Type: application/octet-stream
Content-Length: 13661
Server: AmazonS3
+ wait_backup backup-gcp-cs
+ local backup_name=backup-gcp-cs
+ local target_state=ready
+ set +o xtrace
waiting for backup-gcp-cs to reach ready state...........OK
+ check_backup_in_storage backup-gcp-cs gcs rs0
+ local backup=backup-gcp-cs
+ local storage_type=gcs
+ local replset=rs0
+ local file=filelist.pbm
+ local endpoint
+ case ${storage_type} in
+ endpoint=storage.googleapis.com
++ get_backup_dest backup-gcp-cs
++ local backup_name=backup-gcp-cs
++ /usr/sbin/sed 's|https://engk8soperators.blob.core.windows.net/||'
++ kubectl_bin get psmdb-backup backup-gcp-cs -o 'jsonpath={.status.destination}'
++ sed -e 's/.json$//'
++ sed 's|s3://||'
+++ mktemp
++ sed 's|gs://||'
++ sed 's|azure://||'
++ local LAST_OUT=/tmp/tmp.Sj1cJkGMYV
+++ mktemp
++ local LAST_ERR=/tmp/tmp.Gh7mrmvajp
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb-backup backup-gcp-cs -o 'jsonpath={.status.destination}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.Sj1cJkGMYV
++ cat /tmp/tmp.Gh7mrmvajp
++ rm /tmp/tmp.Sj1cJkGMYV /tmp/tmp.Gh7mrmvajp
++ return 0
+ backup_dest=operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:15:28Z
+ [[ gcs == \m\i\n\i\o ]]
+ local url=https://storage.googleapis.com/operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:15:28Z/rs0/filelist.pbm
+ log 'checking if https://storage.googleapis.com/operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:15:28Z/rs0/filelist.pbm exists'
+ set +o xtrace
[2025-10-18T12:15:51+0000] checking if https://storage.googleapis.com/operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:15:28Z/rs0/filelist.pbm exists
+ curl --fail --head https://storage.googleapis.com/operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:15:28Z/rs0/filelist.pbm
% Total % Received % Xferd Average Speed Time Time Time Current
Dload Upload Total Spent Left Speed
0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0
0 13575 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0
HTTP/2 200
content-type: application/octet-stream
x-guploader-uploadid: AAwnv3Ip2pYHUTbbPGG4Mfnm_jzktPI1RjW4S4nTf4rggEOt66DGFKTybeYsddtnb8Zcyg7em2ELn-Y
expires: Sat, 18 Oct 2025 13:15:51 GMT
date: Sat, 18 Oct 2025 12:15:51 GMT
cache-control: public, max-age=3600
last-modified: Sat, 18 Oct 2025 12:15:46 GMT
etag: "7a31d708c28daeafc87b366812b19f6c"
x-goog-generation: 1760789746212419
x-goog-metageneration: 1
x-goog-stored-content-encoding: identity
x-goog-stored-content-length: 13575
x-goog-hash: crc32c=IksH9g==
x-goog-hash: md5=ejHXCMKNrq/IezZoErGfbA==
x-goog-expiration: Sun, 19 Oct 2025 12:15:46 GMT
x-goog-storage-class: REGIONAL
accept-ranges: bytes
content-length: 13575
server: UploadServer
alt-svc: h3=":443"; ma=2592000,h3-29=":443"; ma=2592000
+ wait_backup backup-azure-blob
+ local backup_name=backup-azure-blob
+ local target_state=ready
+ set +o xtrace
waiting for backup-azure-blob to reach ready state.............OK
+ check_backup_in_storage backup-azure-blob azure rs0
+ local backup=backup-azure-blob
+ local storage_type=azure
+ local replset=rs0
+ local file=filelist.pbm
+ local endpoint
+ case ${storage_type} in
+ endpoint=engk8soperators.blob.core.windows.net
++ get_backup_dest backup-azure-blob
++ /usr/sbin/sed 's|https://engk8soperators.blob.core.windows.net/||'
++ local backup_name=backup-azure-blob
++ kubectl_bin get psmdb-backup backup-azure-blob -o 'jsonpath={.status.destination}'
++ sed -e 's/.json$//'
++ sed 's|s3://||'
+++ mktemp
++ sed 's|azure://||'
++ sed 's|gs://||'
++ local LAST_OUT=/tmp/tmp.DRjCHG1ZXY
+++ mktemp
++ local LAST_ERR=/tmp/tmp.EAIM8QGNu5
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in $(seq 0 2)
++ set +e
++ kubectl get psmdb-backup backup-azure-blob -o 'jsonpath={.status.destination}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.DRjCHG1ZXY
++ cat /tmp/tmp.EAIM8QGNu5
++ rm /tmp/tmp.DRjCHG1ZXY /tmp/tmp.EAIM8QGNu5
++ return 0
+ backup_dest=operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:15:51Z
+ [[ azure == \m\i\n\i\o ]]
+ local url=https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:15:51Z/rs0/filelist.pbm
https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:15:51Z/rs0/filelist.pbm exists' + set +o xtrace [2025-10-18T12:16:19+0000] checking if https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:15:51Z/rs0/filelist.pbm exists + curl --fail --head https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:15:51Z/rs0/filelist.pbm % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 0 13661 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 HTTP/1.1 200 OK Content-Length: 13661 Content-Type: application/octet-stream Content-MD5: J9gOtg7xyWhQpbqQx+X+SQ== Last-Modified: Sat, 18 Oct 2025 12:16:09 GMT ETag: 0x8DE0E401F3BFE9F Server: Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0 x-ms-request-id: 67ad29f3-701e-0095-3329-4037da000000 x-ms-version: 2009-09-19 x-ms-lease-status: unlocked x-ms-blob-type: BlockBlob Date: Sat, 18 Oct 2025 12:16:19 GMT + backup_name_minio=backup-minio + run_backup minio backup-minio + local storage=minio + local backup_name=backup-minio + local base=true + local backup_type=incremental + [[ true == \t\r\u\e ]] + backup_type=incremental-base + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/backup.yml + /usr/sbin/sed -e 's/name:/name: backup-minio/' + /usr/sbin/sed -e 's/storageName:/storageName: minio/' + yq '.spec.type="incremental-base"' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.0HzBF9Q9t6 ++ mktemp + local LAST_ERR=/tmp/tmp.nPd6ANGJNm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0HzBF9Q9t6 perconaservermongodbbackup.psmdb.percona.com/backup-minio created + cat /tmp/tmp.nPd6ANGJNm + rm /tmp/tmp.0HzBF9Q9t6 /tmp/tmp.nPd6ANGJNm + return 0 + wait_backup backup-minio + local backup_name=backup-minio + local target_state=ready + set +o xtrace waiting for backup-minio to reach ready state......OK + check_backup_in_storage backup-minio minio rs0 + local backup=backup-minio + local storage_type=minio + local replset=rs0 + local file=filelist.pbm + local endpoint + case ${storage_type} in + endpoint=minio-service ++ get_backup_dest backup-minio ++ /usr/sbin/sed 's|https://engk8soperators.blob.core.windows.net/||' ++ local backup_name=backup-minio ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' +++ mktemp ++ sed 's|azure://||' ++ sed 's|gs://||' ++ local LAST_OUT=/tmp/tmp.ae8i6JULVj +++ mktemp ++ local LAST_ERR=/tmp/tmp.8kyFldecG9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ae8i6JULVj ++ cat /tmp/tmp.8kyFldecG9 ++ rm /tmp/tmp.ae8i6JULVj /tmp/tmp.8kyFldecG9 ++ return 0 + backup_dest=operator-testing/2025-10-18T12:16:21Z + [[ minio == \m\i\n\i\o ]] + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls 
s3://operator-testing/2025-10-18T12:16:21Z/rs0/filelist.pbm + grep filelist.pbm ++ mktemp + local LAST_OUT=/tmp/tmp.mT3v9jlQti ++ mktemp + local LAST_ERR=/tmp/tmp.YLKu6z4D9M + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2025-10-18T12:16:21Z/rs0/filelist.pbm + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mT3v9jlQti + cat /tmp/tmp.YLKu6z4D9M + rm /tmp/tmp.mT3v9jlQti /tmp/tmp.YLKu6z4D9M + return 0 2025-10-18 12:16:29 13575 filelist.pbm + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-incremental-31953 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.M6EXb8uP82 +++ mktemp ++ local LAST_ERR=/tmp/tmp.XvVC78GXDW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.M6EXb8uP82 ++ cat /tmp/tmp.XvVC78GXDW ++ rm /tmp/tmp.M6EXb8uP82 /tmp/tmp.XvVC78GXDW ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.2mh4L7VWUi ++ mktemp + local LAST_ERR=/tmp/tmp.Phh8f7ypOt + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2mh4L7VWUi Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("9e159a51-2734-4aa1-86c6-ef0d546c93e2") } Percona Server for MongoDB server version: v7.0.24-13 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.Phh8f7ypOt + rm /tmp/tmp.2mh4L7VWUi /tmp/tmp.Phh8f7ypOt + return 0 + sleep 5 + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 -not-base + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local postfix=-not-base + local 
suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:16:44+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.BdFvW0s0Yg +++ mktemp ++ local LAST_ERR=/tmp/tmp.RElip1EzWj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BdFvW0s0Yg ++ cat /tmp/tmp.RElip1EzWj ++ rm /tmp/tmp.BdFvW0s0Yg /tmp/tmp.RElip1EzWj ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Pd1lbSej7M ++ mktemp + local LAST_ERR=/tmp/tmp.l9JdrNviWF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Pd1lbSej7M + cat /tmp/tmp.l9JdrNviWF + rm /tmp/tmp.Pd1lbSej7M /tmp/tmp.l9JdrNviWF + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find-not-base.json /tmp/tmp.yrUrhm9oZh/find-not-base + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 -not-base + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local postfix=-not-base + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:16:47+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local 
uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.ncLXmvGC2O +++ mktemp ++ local LAST_ERR=/tmp/tmp.BYAzR0YdnA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ncLXmvGC2O ++ cat /tmp/tmp.BYAzR0YdnA ++ rm /tmp/tmp.ncLXmvGC2O /tmp/tmp.BYAzR0YdnA ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.pXK6PwPS7l ++ mktemp + local LAST_ERR=/tmp/tmp.JCbszSA7og + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.pXK6PwPS7l + cat /tmp/tmp.JCbszSA7og + rm /tmp/tmp.pXK6PwPS7l /tmp/tmp.JCbszSA7og + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find-not-base.json /tmp/tmp.yrUrhm9oZh/find-not-base + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 -not-base + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local postfix=-not-base + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:16:50+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ 
mktemp ++ local LAST_OUT=/tmp/tmp.Zbbbxi4yR6 egrep: warning: egrep is obsolescent; using grep -E +++ mktemp ++ local LAST_ERR=/tmp/tmp.unOJyCnCHb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Zbbbxi4yR6 ++ cat /tmp/tmp.unOJyCnCHb ++ rm /tmp/tmp.Zbbbxi4yR6 /tmp/tmp.unOJyCnCHb ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.dlqrLraM9G ++ mktemp + local LAST_ERR=/tmp/tmp.nZFmksTB7V + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dlqrLraM9G + cat /tmp/tmp.nZFmksTB7V + rm /tmp/tmp.dlqrLraM9G /tmp/tmp.nZFmksTB7V + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find-not-base.json /tmp/tmp.yrUrhm9oZh/find-not-base + backup_name_minio_not_base=backup-minio-not-base + run_backup minio backup-minio-not-base false + local storage=minio + local backup_name=backup-minio-not-base + local base=false + local backup_type=incremental + [[ false == \t\r\u\e ]] + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/backup.yml + /usr/sbin/sed -e 's/name:/name: backup-minio-not-base/' + /usr/sbin/sed -e 's/storageName:/storageName: minio/' + yq '.spec.type="incremental"' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.drdWyrhwx5 ++ mktemp + local LAST_ERR=/tmp/tmp.4sYWYC5sE2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.drdWyrhwx5 perconaservermongodbbackup.psmdb.percona.com/backup-minio-not-base created + cat /tmp/tmp.4sYWYC5sE2 + rm /tmp/tmp.drdWyrhwx5 /tmp/tmp.4sYWYC5sE2 + return 0 + wait_backup backup-minio-not-base + local backup_name=backup-minio-not-base + local target_state=ready + set +o xtrace waiting for backup-minio-not-base to reach ready state......OK + check_backup_in_storage backup-minio-not-base minio rs0 + local backup=backup-minio-not-base + local storage_type=minio + local replset=rs0 + local file=filelist.pbm + local endpoint + case ${storage_type} in + endpoint=minio-service ++ get_backup_dest backup-minio-not-base ++ local backup_name=backup-minio-not-base ++ /usr/sbin/sed 's|https://engk8soperators.blob.core.windows.net/||' ++ kubectl_bin get psmdb-backup backup-minio-not-base -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' +++ mktemp ++ sed 's|s3://||' ++ sed 's|azure://||' ++ sed 's|gs://||' ++ local LAST_OUT=/tmp/tmp.QTlAvhHvwk +++ mktemp ++ local LAST_ERR=/tmp/tmp.uAeZ2KwwEE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ 
for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-minio-not-base -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QTlAvhHvwk ++ cat /tmp/tmp.uAeZ2KwwEE ++ rm /tmp/tmp.QTlAvhHvwk /tmp/tmp.uAeZ2KwwEE ++ return 0 + backup_dest=operator-testing/2025-10-18T12:16:56Z + [[ minio == \m\i\n\i\o ]] + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2025-10-18T12:16:56Z/rs0/filelist.pbm + grep filelist.pbm ++ mktemp + local LAST_OUT=/tmp/tmp.0nMOA7e1Zo ++ mktemp + local LAST_ERR=/tmp/tmp.x7f8QXggg3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2025-10-18T12:16:56Z/rs0/filelist.pbm + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0nMOA7e1Zo + cat /tmp/tmp.x7f8QXggg3 All commands and output from this session will be recorded in container logs, including credentials and sensitive information passed through the command prompt. If you don't see a command prompt, try pressing enter. warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-incremental-31953 + rm /tmp/tmp.0nMOA7e1Zo /tmp/tmp.x7f8QXggg3 + return 0 2025-10-18 12:17:03 14375 filelist.pbm + '[' -z '' ']' + run_restore backup-aws-s3 + local backup_name=backup-aws-s3 + log 'drop collection' + set +o xtrace [2025-10-18T12:17:12+0000] drop collection + run_mongo 'use myApp\n db.test.drop()' myApp:myPass@some-name-rs0.demand-backup-incremental-31953 + local 'command=use myApp\n db.test.drop()' + local uri=myApp:myPass@some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2hA2t7W6oB +++ mktemp ++ local LAST_ERR=/tmp/tmp.v6F8qyhnBp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2hA2t7W6oB ++ cat /tmp/tmp.v6F8qyhnBp ++ rm /tmp/tmp.2hA2t7W6oB /tmp/tmp.v6F8qyhnBp ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.FJzdj5mrUb ++ mktemp + local LAST_ERR=/tmp/tmp.il7YCZSO95 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo 
mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FJzdj5mrUb Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("da20fb1a-a94d-43eb-9e5f-dac8ada48628") } Percona Server for MongoDB server version: v7.0.24-13 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.il7YCZSO95 + rm /tmp/tmp.FJzdj5mrUb /tmp/tmp.il7YCZSO95 + return 0 + log 'check backup and restore -- backup-aws-s3' + set +o xtrace [2025-10-18T12:17:14+0000] check backup and restore -- backup-aws-s3 + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-aws-s3/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-aws-s3/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.30VvFyKalD ++ mktemp + local LAST_ERR=/tmp/tmp.Ny0YZapuwr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.30VvFyKalD perconaservermongodbrestore.psmdb.percona.com/restore-backup-aws-s3 created + cat /tmp/tmp.Ny0YZapuwr + rm /tmp/tmp.30VvFyKalD /tmp/tmp.Ny0YZapuwr + return 0 + run_recovery_check backup-aws-s3 + local backup_name=backup-aws-s3 + local compare_suffix=_restore + local base=true + wait_restore backup-aws-s3 some-name requested 0 3000 + local backup_name=backup-aws-s3 + local cluster_name=some-name + local target_state=requested + local wait_cluster_consistency=0 + local wait_time=3000 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-aws-s3 object to be created.OK Waiting psmdb-restore/restore-backup-aws-s3 to reach state "requested" ..OK after 1 minutes + [[ 0 -eq 1 ]] + echo + compare_kubectl statefulset/some-name-rs0 _restore + local resource=statefulset/some-name-rs0 + local postfix=_restore + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore.yml + local new_result=/tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. 
| select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("demand-backup-incremental-31953", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.7zP5wVcSWg ++ mktemp + local LAST_ERR=/tmp/tmp.ABDGUJZnh3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7zP5wVcSWg + cat /tmp/tmp.ABDGUJZnh3 + rm /tmp/tmp.7zP5wVcSWg /tmp/tmp.ABDGUJZnh3 + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore.yml /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + log 'compare_kubectl: statefulset/some-name-rs0 OK' + set +o xtrace [2025-10-18T12:19:18+0000] compare_kubectl: statefulset/some-name-rs0 OK + wait_restore backup-aws-s3 some-name ready 0 1800 + local backup_name=backup-aws-s3 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=0 + local wait_time=1800 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-aws-s3 object to be created.OK Waiting psmdb-restore/restore-backup-aws-s3 to reach state "ready" ..OK after 1 minutes + [[ 0 -eq 1 ]] ++ kubectl_bin get psmdb some-name -o yaml ++ yq '.metadata.annotations."percona.com/resync-pbm"' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H9ae3Dfotl +++ mktemp ++ local LAST_ERR=/tmp/tmp.wRT5tfJ8m8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 
++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o yaml ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.H9ae3Dfotl ++ cat /tmp/tmp.wRT5tfJ8m8 ++ rm /tmp/tmp.H9ae3Dfotl /tmp/tmp.wRT5tfJ8m8 ++ return 0 + '[' true == null ']' + echo + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7Q4LhrGjxq +++ mktemp ++ local LAST_ERR=/tmp/tmp.g7Bw16YL4X ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7Q4LhrGjxq ++ cat /tmp/tmp.g7Bw16YL4X ++ rm /tmp/tmp.7Q4LhrGjxq /tmp/tmp.g7Bw16YL4X ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.34TaP9reSX +++ mktemp ++ local LAST_ERR=/tmp/tmp.lPa9AeTxET ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.34TaP9reSX ++ cat /tmp/tmp.lPa9AeTxET ++ rm /tmp/tmp.34TaP9reSX /tmp/tmp.lPa9AeTxET ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.i6s8Layp6k +++ mktemp ++ local LAST_ERR=/tmp/tmp.YWmhauRVbW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.i6s8Layp6k ++ cat /tmp/tmp.YWmhauRVbW ++ rm /tmp/tmp.i6s8Layp6k /tmp/tmp.YWmhauRVbW ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BlCghrIJWn +++ mktemp ++ local LAST_ERR=/tmp/tmp.wi6X06zTPy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BlCghrIJWn ++ cat /tmp/tmp.wi6X06zTPy ++ rm /tmp/tmp.BlCghrIJWn /tmp/tmp.wi6X06zTPy ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Vo6zKixFJW +++ mktemp ++ local LAST_ERR=/tmp/tmp.ngbkBXVOZl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Vo6zKixFJW ++ cat /tmp/tmp.ngbkBXVOZl ++ rm /tmp/tmp.Vo6zKixFJW /tmp/tmp.ngbkBXVOZl ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . 
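-----------------------------------------------------------------------------------
wait_cluster_consistency: polling .status.state
-----------------------------------------------------------------------------------
The loop above reads the psmdb object's `.status.state` every 10 seconds and
gives up after 32 attempts. Note the state briefly flips from `initializing` to
`error` and back while the restored replset re-elects a primary, which is why
the loop only exits on `ready` instead of failing fast on `error`. A minimal
sketch of the same loop, without the kubectl_bin temp-file bookkeeping:

    wait_cluster_consistency() {
        local cluster=$1 retry=0
        echo -n 'waiting for cluster readyness'
        until [[ "$(kubectl get psmdb "$cluster" -o 'jsonpath={.status.state}')" == "ready" ]]; do
            let retry+=1
            if [ "$retry" -ge 32 ]; then
                echo "cluster $cluster did not become ready" >&2
                return 1
            fi
            echo -n .
            sleep 10
        done
        echo .OK
    }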
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GSXeznTRzE +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZX1xv1zEJX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.GSXeznTRzE ++ cat /tmp/tmp.ZX1xv1zEJX ++ rm /tmp/tmp.GSXeznTRzE /tmp/tmp.ZX1xv1zEJX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ddkiM5ROxu +++ mktemp ++ local LAST_ERR=/tmp/tmp.j8FNhZsM1A ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ddkiM5ROxu ++ cat /tmp/tmp.j8FNhZsM1A ++ rm /tmp/tmp.ddkiM5ROxu /tmp/tmp.j8FNhZsM1A ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dX8lthRcat +++ mktemp ++ local LAST_ERR=/tmp/tmp.3zoIEiFehw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dX8lthRcat ++ cat /tmp/tmp.3zoIEiFehw ++ rm /tmp/tmp.dX8lthRcat /tmp/tmp.3zoIEiFehw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oaEc8CCWLR +++ mktemp ++ local LAST_ERR=/tmp/tmp.xmMK4LmWMX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oaEc8CCWLR ++ cat /tmp/tmp.xmMK4LmWMX ++ rm /tmp/tmp.oaEc8CCWLR /tmp/tmp.xmMK4LmWMX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 9 -ge 32 ']' + echo -n . 
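-----------------------------------------------------------------------------------
the kubectl_bin retry wrapper
-----------------------------------------------------------------------------------
Every kubectl call in this log goes through the same wrapper: stdout and stderr
are captured into mktemp files (the /tmp/tmp.* pairs), the command is retried up
to three times via `seq 0 2`, and both streams are replayed before the temp
files are removed. A sketch of that shape, reconstructed from the trace (the
retry delay and the success check are simplified assumptions):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 timeout=4
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            # stop retrying as soon as the call succeeds
            if [ "$exit_status" -eq 0 ]; then
                break
            fi
            sleep "$timeout"
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

Keeping stderr in its own file is what lets the test assert on clean stdout
(e.g. the jsonpath state strings) while still surfacing warnings in the log.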
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e30IYbSwYL +++ mktemp ++ local LAST_ERR=/tmp/tmp.CPRHeCAKrq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.e30IYbSwYL ++ cat /tmp/tmp.CPRHeCAKrq ++ rm /tmp/tmp.e30IYbSwYL /tmp/tmp.CPRHeCAKrq ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + wait_for_pbm_operations some-name + local cluster=some-name + set +o xtrace waiting for PBM operation to finish.OK + [[ true == true ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:22:58+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.9sWs2YT4Vr +++ mktemp ++ local LAST_ERR=/tmp/tmp.ThTrT1ODTO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9sWs2YT4Vr ++ cat /tmp/tmp.ThTrT1ODTO ++ rm /tmp/tmp.9sWs2YT4Vr /tmp/tmp.ThTrT1ODTO ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Ui1Iw5x7oP ++ mktemp + local LAST_ERR=/tmp/tmp.GJKw7HRJxO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Ui1Iw5x7oP + cat /tmp/tmp.GJKw7HRJxO + rm /tmp/tmp.Ui1Iw5x7oP /tmp/tmp.GJKw7HRJxO + return 0 + [[ 0 -eq 0 ]] + diff -u 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:23:00+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.l3sZ4itBnZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.C9fNlh5d7c ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.l3sZ4itBnZ ++ cat /tmp/tmp.C9fNlh5d7c ++ rm /tmp/tmp.l3sZ4itBnZ /tmp/tmp.C9fNlh5d7c ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.yPSdeAUXbb ++ mktemp + local LAST_ERR=/tmp/tmp.7nrWenIWj4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yPSdeAUXbb + cat /tmp/tmp.7nrWenIWj4 + rm /tmp/tmp.yPSdeAUXbb /tmp/tmp.7nrWenIWj4 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:23:03+0000] running 
db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.dAvaeDfa8T +++ mktemp ++ local LAST_ERR=/tmp/tmp.JzCTCq8sX8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dAvaeDfa8T ++ cat /tmp/tmp.JzCTCq8sX8 ++ rm /tmp/tmp.dAvaeDfa8T /tmp/tmp.JzCTCq8sX8 ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.3UfxnmNYbm ++ mktemp + local LAST_ERR=/tmp/tmp.TB9zgZxOEa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3UfxnmNYbm + cat /tmp/tmp.TB9zgZxOEa + rm /tmp/tmp.3UfxnmNYbm /tmp/tmp.TB9zgZxOEa + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find + run_restore backup-gcp-cs + local backup_name=backup-gcp-cs + log 'drop collection' + set +o xtrace [2025-10-18T12:23:06+0000] drop collection + run_mongo 'use myApp\n db.test.drop()' myApp:myPass@some-name-rs0.demand-backup-incremental-31953 + local 'command=use myApp\n db.test.drop()' + local uri=myApp:myPass@some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.31sCicngrJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.xl1EuDD8Mu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.31sCicngrJ ++ cat /tmp/tmp.xl1EuDD8Mu ++ rm /tmp/tmp.31sCicngrJ /tmp/tmp.xl1EuDD8Mu ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ 
myApp:myPass@some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.4mtWWI9t3k ++ mktemp + local LAST_ERR=/tmp/tmp.MEyj5Bhoar + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4mtWWI9t3k Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("9ab6dc3b-0e5e-4a6c-b489-09b75092ddfa") } Percona Server for MongoDB server version: v7.0.24-13 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.MEyj5Bhoar + rm /tmp/tmp.4mtWWI9t3k /tmp/tmp.MEyj5Bhoar + return 0 + log 'check backup and restore -- backup-gcp-cs' + set +o xtrace [2025-10-18T12:23:09+0000] check backup and restore -- backup-gcp-cs + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-gcp-cs/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-gcp-cs/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.1VlBN8hXHn ++ mktemp + local LAST_ERR=/tmp/tmp.sSO63Db5wq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1VlBN8hXHn perconaservermongodbrestore.psmdb.percona.com/restore-backup-gcp-cs created + cat /tmp/tmp.sSO63Db5wq + rm /tmp/tmp.1VlBN8hXHn /tmp/tmp.sSO63Db5wq + return 0 + run_recovery_check backup-gcp-cs + local backup_name=backup-gcp-cs + local compare_suffix=_restore + local base=true + wait_restore backup-gcp-cs some-name requested 0 3000 + local backup_name=backup-gcp-cs + local cluster_name=some-name + local target_state=requested + local wait_cluster_consistency=0 + local wait_time=3000 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-gcp-cs object to be created.OK Waiting psmdb-restore/restore-backup-gcp-cs to reach state "requested" ...OK after 2 minutes + [[ 0 -eq 1 ]] + echo + compare_kubectl statefulset/some-name-rs0 _restore + local resource=statefulset/some-name-rs0 + local postfix=_restore + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore.yml + local new_result=/tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval ' 
del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("demand-backup-incremental-31953", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.bDqwH3lwg8 ++ mktemp + local LAST_ERR=/tmp/tmp.n7ZJYphJ6o + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bDqwH3lwg8 + cat /tmp/tmp.n7ZJYphJ6o + rm /tmp/tmp.bDqwH3lwg8 /tmp/tmp.n7ZJYphJ6o + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore.yml /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + log 'compare_kubectl: statefulset/some-name-rs0 OK' + set +o xtrace [2025-10-18T12:25:25+0000] compare_kubectl: statefulset/some-name-rs0 OK + wait_restore backup-gcp-cs some-name ready 0 1800 + local backup_name=backup-gcp-cs + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=0 + local wait_time=1800 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-gcp-cs object to be created.OK Waiting psmdb-restore/restore-backup-gcp-cs to reach state "ready" ..OK after 1 minutes + [[ 0 -eq 1 ]] ++ kubectl_bin get psmdb some-name -o yaml ++ yq '.metadata.annotations."percona.com/resync-pbm"' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xIfcWLvZki +++ mktemp ++ local LAST_ERR=/tmp/tmp.NJnsTgqVqG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o yaml ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xIfcWLvZki ++ cat /tmp/tmp.NJnsTgqVqG ++ rm /tmp/tmp.xIfcWLvZki /tmp/tmp.NJnsTgqVqG ++ return 0 + '[' true == null ']' + echo + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.330jzkFdAd +++ mktemp ++ local LAST_ERR=/tmp/tmp.2QCF0NDWwx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.330jzkFdAd ++ cat /tmp/tmp.2QCF0NDWwx ++ rm /tmp/tmp.330jzkFdAd /tmp/tmp.2QCF0NDWwx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bWZJ6d3qGN +++ mktemp ++ local LAST_ERR=/tmp/tmp.UAoENIIKf3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bWZJ6d3qGN ++ cat /tmp/tmp.UAoENIIKf3 ++ rm /tmp/tmp.bWZJ6d3qGN /tmp/tmp.UAoENIIKf3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5AkLwe0t0m +++ mktemp ++ local LAST_ERR=/tmp/tmp.fhU419VCSt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5AkLwe0t0m ++ cat /tmp/tmp.fhU419VCSt ++ rm /tmp/tmp.5AkLwe0t0m /tmp/tmp.fhU419VCSt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Nir8box89J +++ mktemp ++ local LAST_ERR=/tmp/tmp.dxtGGjkl43 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Nir8box89J ++ cat /tmp/tmp.dxtGGjkl43 ++ rm /tmp/tmp.Nir8box89J /tmp/tmp.dxtGGjkl43 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zjNErPHouK +++ mktemp ++ local LAST_ERR=/tmp/tmp.0MSmvDliI6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zjNErPHouK ++ cat /tmp/tmp.0MSmvDliI6 ++ rm /tmp/tmp.zjNErPHouK /tmp/tmp.0MSmvDliI6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dMEhu5Uxd7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PsAmJOdSry ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dMEhu5Uxd7 ++ cat /tmp/tmp.PsAmJOdSry ++ rm /tmp/tmp.dMEhu5Uxd7 /tmp/tmp.PsAmJOdSry ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hnQiuDXeLe +++ mktemp ++ local LAST_ERR=/tmp/tmp.kpFQJrd5Mm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hnQiuDXeLe ++ cat /tmp/tmp.kpFQJrd5Mm ++ rm /tmp/tmp.hnQiuDXeLe /tmp/tmp.kpFQJrd5Mm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
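-----------------------------------------------------------------------------------
run_mongo: driving the mongo shell through the client pod
-----------------------------------------------------------------------------------
All data checks in this test (the inserts, drops and find() comparisons) follow
one pattern: look up the psmdb-client pod by label, then `kubectl exec` a
printf-piped script into the mongo shell against the replset URI. A minimal
sketch of that helper, assuming the same label selector and URI layout as the
trace (TLS off, replicaSet=rs0), with error handling omitted:

    run_mongo() {
        local script=$1 uri=$2
        local pod
        # the client pod is found by its name=psmdb-client label
        pod=$(kubectl get pods --selector=name=psmdb-client \
            -o 'jsonpath={.items[].metadata.name}')
        kubectl exec "$pod" -- bash -c \
            "printf '${script}\n' | mongo 'mongodb://${uri}.svc.cluster.local/admin?ssl=false&replicaSet=rs0'"
    }

    # e.g. run_mongo 'use myApp\n db.test.find()' \
    #     'myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953'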
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cbvGaMBAMB +++ mktemp ++ local LAST_ERR=/tmp/tmp.0u4PxRwdGY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cbvGaMBAMB ++ cat /tmp/tmp.0u4PxRwdGY ++ rm /tmp/tmp.cbvGaMBAMB /tmp/tmp.0u4PxRwdGY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6rzf86Xdag +++ mktemp ++ local LAST_ERR=/tmp/tmp.kUuEqS0Z6K ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6rzf86Xdag ++ cat /tmp/tmp.kUuEqS0Z6K ++ rm /tmp/tmp.6rzf86Xdag /tmp/tmp.kUuEqS0Z6K ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + wait_for_pbm_operations some-name + local cluster=some-name + set +o xtrace waiting for PBM operation to finish....OK + [[ true == true ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:29:08+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.qZiyRuQL91 +++ mktemp ++ local LAST_ERR=/tmp/tmp.586iB1FRzs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.qZiyRuQL91 ++ cat /tmp/tmp.586iB1FRzs ++ rm /tmp/tmp.qZiyRuQL91 /tmp/tmp.586iB1FRzs ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.FYebBSlHR5 ++ mktemp + local 
LAST_ERR=/tmp/tmp.OkXY78Cpg4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FYebBSlHR5 + cat /tmp/tmp.OkXY78Cpg4 + rm /tmp/tmp.FYebBSlHR5 /tmp/tmp.OkXY78Cpg4 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:29:11+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.nIvcEwVQ2S +++ mktemp ++ local LAST_ERR=/tmp/tmp.pz9RedDSSA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nIvcEwVQ2S ++ cat /tmp/tmp.pz9RedDSSA ++ rm /tmp/tmp.nIvcEwVQ2S /tmp/tmp.pz9RedDSSA ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.IXRLfesPsU ++ mktemp + local LAST_ERR=/tmp/tmp.BH2KV37Ske + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.IXRLfesPsU + cat /tmp/tmp.BH2KV37Ske + rm /tmp/tmp.IXRLfesPsU /tmp/tmp.BH2KV37Ske + return 0 + [[ 0 -eq 0 ]] + diff -u 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:29:13+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.G6HzgYrUdX +++ mktemp ++ local LAST_ERR=/tmp/tmp.j9gmcGsWsO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.G6HzgYrUdX ++ cat /tmp/tmp.j9gmcGsWsO ++ rm /tmp/tmp.G6HzgYrUdX /tmp/tmp.j9gmcGsWsO ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.az7fhU8YZF ++ mktemp + local LAST_ERR=/tmp/tmp.Xt75H34u4n + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.az7fhU8YZF + cat /tmp/tmp.Xt75H34u4n + rm /tmp/tmp.az7fhU8YZF /tmp/tmp.Xt75H34u4n + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find + run_restore backup-azure-blob + local backup_name=backup-azure-blob + log 'drop collection' + set +o xtrace [2025-10-18T12:29:15+0000] drop collection + run_mongo 'use myApp\n db.test.drop()' myApp:myPass@some-name-rs0.demand-backup-incremental-31953 + local 'command=use myApp\n db.test.drop()' + local uri=myApp:myPass@some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods 
--selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vwd3IMgtMx +++ mktemp ++ local LAST_ERR=/tmp/tmp.tvavFKGwp2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vwd3IMgtMx ++ cat /tmp/tmp.tvavFKGwp2 ++ rm /tmp/tmp.vwd3IMgtMx /tmp/tmp.tvavFKGwp2 ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.1jhIhAzbOX ++ mktemp + local LAST_ERR=/tmp/tmp.tz7Qk5qL1w + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1jhIhAzbOX Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("fcdce26d-c71f-4041-9e24-825ce916e045") } Percona Server for MongoDB server version: v7.0.24-13 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.tz7Qk5qL1w + rm /tmp/tmp.1jhIhAzbOX /tmp/tmp.tz7Qk5qL1w + return 0 + log 'check backup and restore -- backup-azure-blob' + set +o xtrace [2025-10-18T12:29:18+0000] check backup and restore -- backup-azure-blob + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-azure-blob/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-azure-blob/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.oCWDGVdLhN ++ mktemp + local LAST_ERR=/tmp/tmp.9VDZ5YAndl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oCWDGVdLhN perconaservermongodbrestore.psmdb.percona.com/restore-backup-azure-blob created + cat /tmp/tmp.9VDZ5YAndl + rm /tmp/tmp.oCWDGVdLhN /tmp/tmp.9VDZ5YAndl + return 0 + run_recovery_check backup-azure-blob + local backup_name=backup-azure-blob + local compare_suffix=_restore + local base=true + wait_restore backup-azure-blob some-name requested 0 3000 + local backup_name=backup-azure-blob + local cluster_name=some-name + local target_state=requested + local wait_cluster_consistency=0 + local wait_time=3000 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-azure-blob object to be created.OK Waiting psmdb-restore/restore-backup-azure-blob to reach state "requested" ...OK after 2 minutes + [[ 0 -eq 1 ]] + 
echo + compare_kubectl statefulset/some-name-rs0 _restore + local resource=statefulset/some-name-rs0 + local postfix=_restore + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore.yml + local new_result=/tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("demand-backup-incremental-31953", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.TARgIDFvRr ++ mktemp + local LAST_ERR=/tmp/tmp.A4HvAKGVwF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TARgIDFvRr + cat /tmp/tmp.A4HvAKGVwF + rm /tmp/tmp.TARgIDFvRr /tmp/tmp.A4HvAKGVwF + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore.yml /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + log 'compare_kubectl: statefulset/some-name-rs0 OK' + set +o xtrace [2025-10-18T12:31:26+0000] compare_kubectl: statefulset/some-name-rs0 OK + wait_restore backup-azure-blob some-name ready 0 1800 + local backup_name=backup-azure-blob + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=0 + local wait_time=1800 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-azure-blob object to be created.OK Waiting psmdb-restore/restore-backup-azure-blob to reach state "ready" ..OK after 1 minutes + [[ 0 -eq 1 ]] ++ kubectl_bin get psmdb some-name -o yaml ++ yq '.metadata.annotations."percona.com/resync-pbm"' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j57crd4Qhq +++ mktemp ++ local LAST_ERR=/tmp/tmp.UmaD1yA1Pu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o yaml ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.j57crd4Qhq ++ cat /tmp/tmp.UmaD1yA1Pu ++ rm /tmp/tmp.j57crd4Qhq /tmp/tmp.UmaD1yA1Pu ++ return 0 + '[' true == null ']' + echo + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EypWsKdcko +++ mktemp ++ local LAST_ERR=/tmp/tmp.fjmpS3qkjZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EypWsKdcko ++ cat /tmp/tmp.fjmpS3qkjZ ++ rm /tmp/tmp.EypWsKdcko /tmp/tmp.fjmpS3qkjZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AakHgictjD +++ mktemp ++ local LAST_ERR=/tmp/tmp.9KmpCLsnCA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AakHgictjD ++ cat /tmp/tmp.9KmpCLsnCA ++ rm /tmp/tmp.AakHgictjD /tmp/tmp.9KmpCLsnCA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nqJH108KCg +++ mktemp ++ local LAST_ERR=/tmp/tmp.07ZOzQiQQv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nqJH108KCg ++ cat /tmp/tmp.07ZOzQiQQv ++ rm /tmp/tmp.nqJH108KCg /tmp/tmp.07ZOzQiQQv ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yYlWNixN9i +++ mktemp ++ local LAST_ERR=/tmp/tmp.KJgStV3pRM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yYlWNixN9i ++ cat /tmp/tmp.KJgStV3pRM ++ rm /tmp/tmp.yYlWNixN9i /tmp/tmp.KJgStV3pRM ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Xnp8tKgF5W +++ mktemp ++ local LAST_ERR=/tmp/tmp.G6MDVhGlt4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Xnp8tKgF5W ++ cat /tmp/tmp.G6MDVhGlt4 ++ rm /tmp/tmp.Xnp8tKgF5W /tmp/tmp.G6MDVhGlt4 ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zYRVT6cyLF +++ mktemp ++ local LAST_ERR=/tmp/tmp.lNZ8lkpzMb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zYRVT6cyLF ++ cat /tmp/tmp.lNZ8lkpzMb ++ rm /tmp/tmp.zYRVT6cyLF /tmp/tmp.lNZ8lkpzMb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.D2J50ntC4W +++ mktemp ++ local LAST_ERR=/tmp/tmp.sQ04T7zTHG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.D2J50ntC4W ++ cat /tmp/tmp.sQ04T7zTHG ++ rm /tmp/tmp.D2J50ntC4W /tmp/tmp.sQ04T7zTHG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
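-----------------------------------------------------------------------------------
note: transient "error" states during the azure-blob recovery check
-----------------------------------------------------------------------------------
In the poll above the cluster state briefly reports "error" (retries 3 through 5)
before falling back to "initializing" and finally "ready"; the loop tolerates this
because it only ever tests for "ready". For orientation, the overall shape of
run_recovery_check as it can be reconstructed from this trace (the helper body is
an assumption, NAMESPACE stands in for the test namespace, and the resync-pbm
annotation check is omitted for brevity):

run_recovery_check() {
    local backup_name=$1
    local compare_suffix=${2:-_restore}
    local base=${3:-true}
    wait_restore "$backup_name" some-name requested 0 3000
    compare_kubectl statefulset/some-name-rs0 "$compare_suffix"
    wait_restore "$backup_name" some-name ready 0 1800
    wait_cluster_consistency some-name
    wait_for_pbm_operations some-name
    local postfix=""
    [[ "$base" == true ]] || postfix="-not-base"    # non-base restores diff against find-not-base.json
    for i in 0 1 2; do
        compare_mongo_cmd find "myApp:myPass@some-name-rs0-$i.some-name-rs0.$NAMESPACE" "$postfix"
    done
}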
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xO8rG2PcCc +++ mktemp ++ local LAST_ERR=/tmp/tmp.28TYculoDs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xO8rG2PcCc ++ cat /tmp/tmp.28TYculoDs ++ rm /tmp/tmp.xO8rG2PcCc /tmp/tmp.28TYculoDs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.49LtZfoTQv +++ mktemp ++ local LAST_ERR=/tmp/tmp.NAXbCvXWdE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.49LtZfoTQv ++ cat /tmp/tmp.NAXbCvXWdE ++ rm /tmp/tmp.49LtZfoTQv /tmp/tmp.NAXbCvXWdE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 9 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JAjydz3Aiy +++ mktemp ++ local LAST_ERR=/tmp/tmp.rCHSqs5Is2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JAjydz3Aiy ++ cat /tmp/tmp.rCHSqs5Is2 ++ rm /tmp/tmp.JAjydz3Aiy /tmp/tmp.rCHSqs5Is2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 10 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BaQwhbwvAl +++ mktemp ++ local LAST_ERR=/tmp/tmp.vTwkRVJeMV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BaQwhbwvAl ++ cat /tmp/tmp.vTwkRVJeMV ++ rm /tmp/tmp.BaQwhbwvAl /tmp/tmp.vTwkRVJeMV ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + wait_for_pbm_operations some-name + local cluster=some-name + set +o xtrace waiting for PBM operation to finish....OK + [[ true == true ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:35:33+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 
's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.wziOEPULSe +++ mktemp ++ local LAST_ERR=/tmp/tmp.Lh4kPCJxX0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wziOEPULSe ++ cat /tmp/tmp.Lh4kPCJxX0 ++ rm /tmp/tmp.wziOEPULSe /tmp/tmp.Lh4kPCJxX0 ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.T0enc1mGpX ++ mktemp + local LAST_ERR=/tmp/tmp.oYCU5ehOqF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.T0enc1mGpX + cat /tmp/tmp.oYCU5ehOqF + rm /tmp/tmp.T0enc1mGpX /tmp/tmp.oYCU5ehOqF + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:35:36+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m6bJlJ74ht egrep: warning: egrep is obsolescent; using grep -E +++ mktemp ++ local LAST_ERR=/tmp/tmp.RgPaJWZidZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.m6bJlJ74ht ++ cat /tmp/tmp.RgPaJWZidZ ++ rm 
/tmp/tmp.m6bJlJ74ht /tmp/tmp.RgPaJWZidZ ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.66x96lVCAj ++ mktemp + local LAST_ERR=/tmp/tmp.vnfAoK5xiR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.66x96lVCAj + cat /tmp/tmp.vnfAoK5xiR + rm /tmp/tmp.66x96lVCAj /tmp/tmp.vnfAoK5xiR + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:35:39+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.Wiz4k5yLpr +++ mktemp ++ local LAST_ERR=/tmp/tmp.0HrhSFs7Sl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Wiz4k5yLpr ++ cat /tmp/tmp.0HrhSFs7Sl ++ rm /tmp/tmp.Wiz4k5yLpr /tmp/tmp.0HrhSFs7Sl ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.DUWp0aPXy5 ++ mktemp + local LAST_ERR=/tmp/tmp.0yaL9epqFq + local 
exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DUWp0aPXy5 + cat /tmp/tmp.0yaL9epqFq + rm /tmp/tmp.DUWp0aPXy5 /tmp/tmp.0yaL9epqFq + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find + run_restore backup-minio-not-base + local backup_name=backup-minio-not-base + log 'drop collection' + set +o xtrace [2025-10-18T12:35:42+0000] drop collection + run_mongo 'use myApp\n db.test.drop()' myApp:myPass@some-name-rs0.demand-backup-incremental-31953 + local 'command=use myApp\n db.test.drop()' + local uri=myApp:myPass@some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N0LGGSFEhr +++ mktemp ++ local LAST_ERR=/tmp/tmp.nTFKNXhYqA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.N0LGGSFEhr ++ cat /tmp/tmp.nTFKNXhYqA ++ rm /tmp/tmp.N0LGGSFEhr /tmp/tmp.nTFKNXhYqA ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.DjCJgEsVKz ++ mktemp + local LAST_ERR=/tmp/tmp.DER5IoMETr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DjCJgEsVKz Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("76c5632d-6e14-4d52-9cce-503436de51e7") } Percona Server for MongoDB server version: v7.0.24-13 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.DER5IoMETr + rm /tmp/tmp.DjCJgEsVKz /tmp/tmp.DER5IoMETr + return 0 + log 'check backup and restore -- backup-minio-not-base' + set +o xtrace [2025-10-18T12:35:44+0000] check backup and restore -- backup-minio-not-base + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: 
restore-backup-minio-not-base/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-minio-not-base/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.zSw9obr8IB ++ mktemp + local LAST_ERR=/tmp/tmp.G62f7uK1bl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zSw9obr8IB perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-not-base created + cat /tmp/tmp.G62f7uK1bl + rm /tmp/tmp.zSw9obr8IB /tmp/tmp.G62f7uK1bl + return 0 + run_recovery_check backup-minio-not-base '' false + local backup_name=backup-minio-not-base + local compare_suffix=_restore + local base=false + wait_restore backup-minio-not-base some-name requested 0 3000 + local backup_name=backup-minio-not-base + local cluster_name=some-name + local target_state=requested + local wait_cluster_consistency=0 + local wait_time=3000 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-not-base object to be created.OK Waiting psmdb-restore/restore-backup-minio-not-base to reach state "requested" ..OK after 1 minutes + [[ 0 -eq 1 ]] + echo + compare_kubectl statefulset/some-name-rs0 _restore + local resource=statefulset/some-name-rs0 + local postfix=_restore + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore.yml + local new_result=/tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. 
| select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("demand-backup-incremental-31953", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.d95YQo7n1S ++ mktemp + local LAST_ERR=/tmp/tmp.cOeeaOmgCx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.d95YQo7n1S + cat /tmp/tmp.cOeeaOmgCx + rm /tmp/tmp.d95YQo7n1S /tmp/tmp.cOeeaOmgCx + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore.yml /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + log 'compare_kubectl: statefulset/some-name-rs0 OK' + set +o xtrace [2025-10-18T12:37:45+0000] compare_kubectl: statefulset/some-name-rs0 OK + wait_restore backup-minio-not-base some-name ready 0 1800 + local backup_name=backup-minio-not-base + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=0 + local wait_time=1800 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-not-base object to be created.OK Waiting psmdb-restore/restore-backup-minio-not-base to reach state "ready" ..OK after 1 minutes + [[ 0 -eq 1 ]] ++ kubectl_bin get psmdb some-name -o yaml ++ yq '.metadata.annotations."percona.com/resync-pbm"' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qv3dNIebKd +++ mktemp ++ local LAST_ERR=/tmp/tmp.c1yQSBahNb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o yaml ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Qv3dNIebKd ++ cat /tmp/tmp.c1yQSBahNb ++ rm /tmp/tmp.Qv3dNIebKd /tmp/tmp.c1yQSBahNb ++ return 0 + '[' true == null ']' + echo + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iiFvpe72kM +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZqykX94Lyq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iiFvpe72kM ++ cat /tmp/tmp.ZqykX94Lyq ++ rm /tmp/tmp.iiFvpe72kM /tmp/tmp.ZqykX94Lyq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SIlJGDz61d +++ mktemp ++ local LAST_ERR=/tmp/tmp.6bU2IUUPso ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SIlJGDz61d ++ cat /tmp/tmp.6bU2IUUPso ++ rm /tmp/tmp.SIlJGDz61d /tmp/tmp.6bU2IUUPso ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xT6hgSRRK1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.HJ6atSbz9r ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xT6hgSRRK1 ++ cat /tmp/tmp.HJ6atSbz9r ++ rm /tmp/tmp.xT6hgSRRK1 /tmp/tmp.HJ6atSbz9r ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EpRLuhzziC +++ mktemp ++ local LAST_ERR=/tmp/tmp.UTDSJ9p4kS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EpRLuhzziC ++ cat /tmp/tmp.UTDSJ9p4kS ++ rm /tmp/tmp.EpRLuhzziC /tmp/tmp.UTDSJ9p4kS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zbZOrALD74 +++ mktemp ++ local LAST_ERR=/tmp/tmp.nwUSyFwQLL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zbZOrALD74 ++ cat /tmp/tmp.nwUSyFwQLL ++ rm /tmp/tmp.zbZOrALD74 /tmp/tmp.nwUSyFwQLL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hSpjcZqSXV +++ mktemp ++ local LAST_ERR=/tmp/tmp.dP6CVSEMk9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hSpjcZqSXV ++ cat /tmp/tmp.dP6CVSEMk9 ++ rm /tmp/tmp.hSpjcZqSXV /tmp/tmp.dP6CVSEMk9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . 
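-----------------------------------------------------------------------------------
sketch: the output scrub applied before diffing mongo results
-----------------------------------------------------------------------------------
The compare_mongo_cmd runs above and below pipe the mongo shell output through an
egrep -v noise filter and a sed that blanks ObjectId values and rewrites pod
ordinals in hostnames to -xxx, so the captured query result can be diffed against
a stable golden file (find.json or find-not-base.json). A sketch of that scrub
stage, written with grep -E since the log itself warns that egrep is obsolescent
(the exclusion list is abbreviated from the trace):

scrub_mongo_output() {
    grep -Ev 'I NETWORK|W NETWORK|F NETWORK|Percona Server for MongoDB|connecting to:|Implicit session:|versions do not match' \
        | sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
}
# usage: run_mongo 'use myApp\n db.test.find()' "$uri" mongodb '' | scrub_mongo_output > "$tmp_dir/find$postfix"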
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BlLEQ2DfKL +++ mktemp ++ local LAST_ERR=/tmp/tmp.MrEDEFUjz3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BlLEQ2DfKL ++ cat /tmp/tmp.MrEDEFUjz3 ++ rm /tmp/tmp.BlLEQ2DfKL /tmp/tmp.MrEDEFUjz3 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + wait_for_pbm_operations some-name + local cluster=some-name + set +o xtrace waiting for PBM operation to finish....OK + [[ false == true ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 -not-base + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local postfix=-not-base + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:41:04+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.GMBHf8RO3n +++ mktemp ++ local LAST_ERR=/tmp/tmp.UX7zwFjGun ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.GMBHf8RO3n ++ cat /tmp/tmp.UX7zwFjGun ++ rm /tmp/tmp.GMBHf8RO3n /tmp/tmp.UX7zwFjGun ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Jn1hbCHUsi ++ mktemp + local LAST_ERR=/tmp/tmp.sfdq0rBMKv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Jn1hbCHUsi + cat /tmp/tmp.sfdq0rBMKv + rm /tmp/tmp.Jn1hbCHUsi /tmp/tmp.sfdq0rBMKv + return 0 + [[ 0 -eq 0 ]] + diff -u 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find-not-base.json /tmp/tmp.yrUrhm9oZh/find-not-base + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 -not-base + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local postfix=-not-base + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:41:07+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.rL8je5B1yj +++ mktemp ++ local LAST_ERR=/tmp/tmp.NF2tN5P7ax ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rL8je5B1yj ++ cat /tmp/tmp.NF2tN5P7ax ++ rm /tmp/tmp.rL8je5B1yj /tmp/tmp.NF2tN5P7ax ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.UKNOuD4lV1 ++ mktemp + local LAST_ERR=/tmp/tmp.jACc8gUUYF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UKNOuD4lV1 + cat /tmp/tmp.jACc8gUUYF + rm /tmp/tmp.UKNOuD4lV1 /tmp/tmp.jACc8gUUYF + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find-not-base.json /tmp/tmp.yrUrhm9oZh/find-not-base + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 -not-base + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local postfix=-not-base + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running 
db.test.find() in myApp' + set +o xtrace [2025-10-18T12:41:09+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.Ljrrc6y4Ab +++ mktemp ++ local LAST_ERR=/tmp/tmp.9VaSQ7bOgi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Ljrrc6y4Ab ++ cat /tmp/tmp.9VaSQ7bOgi ++ rm /tmp/tmp.Ljrrc6y4Ab /tmp/tmp.9VaSQ7bOgi ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.rPproP0rD5 ++ mktemp + local LAST_ERR=/tmp/tmp.x107svYvDX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rPproP0rD5 + cat /tmp/tmp.x107svYvDX + rm /tmp/tmp.rPproP0rD5 /tmp/tmp.x107svYvDX + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find-not-base.json /tmp/tmp.yrUrhm9oZh/find-not-base + run_restore backup-minio + local backup_name=backup-minio + log 'drop collection' + set +o xtrace [2025-10-18T12:41:11+0000] drop collection + run_mongo 'use myApp\n db.test.drop()' myApp:myPass@some-name-rs0.demand-backup-incremental-31953 + local 'command=use myApp\n db.test.drop()' + local uri=myApp:myPass@some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LXx1Aa5ABt +++ mktemp ++ local LAST_ERR=/tmp/tmp.aHe7MX2uiX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LXx1Aa5ABt ++ cat /tmp/tmp.aHe7MX2uiX ++ rm /tmp/tmp.LXx1Aa5ABt /tmp/tmp.aHe7MX2uiX ++ return 0 + local 
client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.coYIB8LZGs ++ mktemp + local LAST_ERR=/tmp/tmp.2tRweqnLYh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.coYIB8LZGs Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("9b95916d-6755-4d4b-b8f2-f0d204f884f4") } Percona Server for MongoDB server version: v7.0.24-13 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.2tRweqnLYh + rm /tmp/tmp.coYIB8LZGs /tmp/tmp.2tRweqnLYh + return 0 + log 'check backup and restore -- backup-minio' + set +o xtrace [2025-10-18T12:41:13+0000] check backup and restore -- backup-minio + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-minio/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.v71RUt7KHU ++ mktemp + local LAST_ERR=/tmp/tmp.UQibvoh8En + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.v71RUt7KHU perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio created + cat /tmp/tmp.UQibvoh8En + rm /tmp/tmp.v71RUt7KHU /tmp/tmp.UQibvoh8En + return 0 + run_recovery_check backup-minio + local backup_name=backup-minio + local compare_suffix=_restore + local base=true + wait_restore backup-minio some-name requested 0 3000 + local backup_name=backup-minio + local cluster_name=some-name + local target_state=requested + local wait_cluster_consistency=0 + local wait_time=3000 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio object to be created.OK Waiting psmdb-restore/restore-backup-minio to reach state "requested" ..OK after 1 minutes + [[ 0 -eq 1 ]] + echo + compare_kubectl statefulset/some-name-rs0 _restore + local resource=statefulset/some-name-rs0 + local postfix=_restore + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore.yml + local new_result=/tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore-oc.yml ']' + kubectl_bin get -o yaml 
statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("demand-backup-incremental-31953", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.qV8PqAAITo ++ mktemp + local LAST_ERR=/tmp/tmp.B1Uyyxcgh1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qV8PqAAITo + cat /tmp/tmp.B1Uyyxcgh1 + rm /tmp/tmp.qV8PqAAITo /tmp/tmp.B1Uyyxcgh1 + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore.yml /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + log 'compare_kubectl: statefulset/some-name-rs0 OK' + set +o xtrace [2025-10-18T12:43:14+0000] compare_kubectl: statefulset/some-name-rs0 OK + wait_restore backup-minio some-name ready 0 1800 + local backup_name=backup-minio + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=0 + local wait_time=1800 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio object to be created.OK Waiting psmdb-restore/restore-backup-minio to reach state "ready" ..OK after 1 minutes + [[ 0 -eq 1 ]] ++ kubectl_bin get psmdb some-name -o yaml ++ yq '.metadata.annotations."percona.com/resync-pbm"' +++ mktemp ++ local LAST_OUT=/tmp/tmp.auXeDyT0kY +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hlg6JXR8Ro ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o yaml ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.auXeDyT0kY ++ cat /tmp/tmp.Hlg6JXR8Ro ++ rm /tmp/tmp.auXeDyT0kY /tmp/tmp.Hlg6JXR8Ro ++ return 0 + '[' true == null ']' + echo + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wCDiYl4iK4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.1NAIbLpE3r ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wCDiYl4iK4 ++ cat /tmp/tmp.1NAIbLpE3r ++ rm /tmp/tmp.wCDiYl4iK4 /tmp/tmp.1NAIbLpE3r ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yeL2vFA1HV +++ mktemp ++ local LAST_ERR=/tmp/tmp.z2zN2NFnXH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yeL2vFA1HV ++ cat /tmp/tmp.z2zN2NFnXH ++ rm /tmp/tmp.yeL2vFA1HV /tmp/tmp.z2zN2NFnXH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IpeEjScKCH +++ mktemp ++ local LAST_ERR=/tmp/tmp.kjkVagUJGE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IpeEjScKCH ++ cat /tmp/tmp.kjkVagUJGE ++ rm /tmp/tmp.IpeEjScKCH /tmp/tmp.kjkVagUJGE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7e1j2odpsj +++ mktemp ++ local LAST_ERR=/tmp/tmp.zCQ93iJdt2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7e1j2odpsj ++ cat /tmp/tmp.zCQ93iJdt2 ++ rm /tmp/tmp.7e1j2odpsj /tmp/tmp.zCQ93iJdt2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LBeRZYlPoy +++ mktemp ++ local LAST_ERR=/tmp/tmp.WIX04Nvj4h ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LBeRZYlPoy ++ cat /tmp/tmp.WIX04Nvj4h ++ rm /tmp/tmp.LBeRZYlPoy /tmp/tmp.WIX04Nvj4h ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Fp3Ra8wSJV +++ mktemp ++ local LAST_ERR=/tmp/tmp.V2AcC5EpzF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Fp3Ra8wSJV ++ cat /tmp/tmp.V2AcC5EpzF ++ rm /tmp/tmp.Fp3Ra8wSJV /tmp/tmp.V2AcC5EpzF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . 
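Note: wait_cluster_consistency, traced around this point, is a bounded poll: one initial 7s sleep, then up to 32 probes of .status.state ten seconds apart. A standalone sketch of the same loop (wait_ready is an illustrative name, not the suite's):

# Poll the psmdb custom resource until it reports "ready", with a hard retry cap.
wait_ready() {
  local cluster=$1 retry=0
  sleep 7
  echo -n 'waiting for cluster readiness'
  until [[ "$(kubectl get psmdb "$cluster" -o 'jsonpath={.status.state}')" == "ready" ]]; do
    (( ++retry >= 32 )) && { echo ' timed out' >&2; return 1; }
    echo -n .
    sleep 10
  done
  echo .OK
}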
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kYWptQv8t2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.KEijV85vuW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kYWptQv8t2 ++ cat /tmp/tmp.KEijV85vuW ++ rm /tmp/tmp.kYWptQv8t2 /tmp/tmp.KEijV85vuW ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + wait_for_pbm_operations some-name + local cluster=some-name + set +o xtrace waiting for PBM operation to finish...OK + [[ true == true ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:46:21+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.KFdJN5qq7V +++ mktemp ++ local LAST_ERR=/tmp/tmp.v17yKDzeO8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.KFdJN5qq7V ++ cat /tmp/tmp.v17yKDzeO8 ++ rm /tmp/tmp.KFdJN5qq7V /tmp/tmp.v17yKDzeO8 ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.da55XNuIQo ++ mktemp + local LAST_ERR=/tmp/tmp.UmbWzhRZBv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.da55XNuIQo + cat /tmp/tmp.UmbWzhRZBv + rm /tmp/tmp.da55XNuIQo /tmp/tmp.UmbWzhRZBv + return 0 + [[ 0 -eq 0 ]] + diff -u 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:46:23+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.YRS3LwU2IP +++ mktemp ++ local LAST_ERR=/tmp/tmp.b94VYUzSib ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YRS3LwU2IP ++ cat /tmp/tmp.b94VYUzSib ++ rm /tmp/tmp.YRS3LwU2IP /tmp/tmp.b94VYUzSib ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.yGxf6tpgyz ++ mktemp + local LAST_ERR=/tmp/tmp.ZSduy69T8Y + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yGxf6tpgyz + cat /tmp/tmp.ZSduy69T8Y + rm /tmp/tmp.yGxf6tpgyz /tmp/tmp.ZSduy69T8Y + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:46:26+0000] running 
db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.BGtTHpHaf9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8brNtkfTcl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BGtTHpHaf9 ++ cat /tmp/tmp.8brNtkfTcl ++ rm /tmp/tmp.BGtTHpHaf9 /tmp/tmp.8brNtkfTcl ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.EUFIxe3rOK ++ mktemp + local LAST_ERR=/tmp/tmp.Byx0h4IRIj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EUFIxe3rOK + cat /tmp/tmp.Byx0h4IRIj + rm /tmp/tmp.EUFIxe3rOK /tmp/tmp.Byx0h4IRIj + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find + desc 'Testing with arbiter and non-voting nodes' + set +o xtrace ----------------------------------------------------------------------------------- Testing with arbiter and non-voting nodes ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/some-name-arbiter-nv.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/some-name-arbiter-nv.yml + kubectl_bin apply -f - ++ mktemp + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/some-name-arbiter-nv.yml + yq eval '(.spec | select(has("pmm"))).pmm.image = "percona/pmm-client:2.44.1-1"' + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' + yq eval '(.spec | 
select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-2092-7720117d"' + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_OUT=/tmp/tmp.UhCR8B8cDy ++ mktemp + local LAST_ERR=/tmp/tmp.DxtbylErIF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UhCR8B8cDy perconaservermongodb.psmdb.percona.com/some-name configured + cat /tmp/tmp.DxtbylErIF + rm /tmp/tmp.UhCR8B8cDy /tmp/tmp.DxtbylErIF + return 0 + log 'check if all pods started' + set +o xtrace [2025-10-18T12:46:31+0000] check if all pods started + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NMXjyjGsM3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.c6Fg0kWxfW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NMXjyjGsM3 ++ cat /tmp/tmp.c6Fg0kWxfW ++ rm /tmp/tmp.NMXjyjGsM3 /tmp/tmp.c6Fg0kWxfW ++ return 0 + [[ true == \t\r\u\e ]] + wait_pod some-name-rs0-arbiter-0 + local pod=some-name-rs0-arbiter-0 + set +o xtrace waiting for pod/some-name-rs0-arbiter-0 to be ready.....OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NgoXnBZ5nF +++ mktemp ++ local LAST_ERR=/tmp/tmp.YFeMualuGD ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NgoXnBZ5nF ++ cat /tmp/tmp.YFeMualuGD ++ rm /tmp/tmp.NgoXnBZ5nF /tmp/tmp.YFeMualuGD ++ return 0 + [[ true == \t\r\u\e ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.size}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AKL5QcD4Jy +++ mktemp ++ local LAST_ERR=/tmp/tmp.WB87IYGLeF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.size}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AKL5QcD4Jy ++ cat /tmp/tmp.WB87IYGLeF ++ rm /tmp/tmp.AKL5QcD4Jy /tmp/tmp.WB87IYGLeF ++ return 0 + last_pod=0 ++ seq 0 0 + for i in $(seq 0 $last_pod) + wait_pod some-name-rs0-nv-0 + local pod=some-name-rs0-nv-0 + set +o xtrace waiting for pod/some-name-rs0-nv-0 to be ready..OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mbucezipRS +++ mktemp ++ local LAST_ERR=/tmp/tmp.kYPyx9ksOR ++ local exit_status=0 ++ local timeout=4 +++ 
seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mbucezipRS ++ cat /tmp/tmp.kYPyx9ksOR ++ rm /tmp/tmp.mbucezipRS /tmp/tmp.kYPyx9ksOR ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness. + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.twEJ9n0KYN +++ mktemp ++ local LAST_ERR=/tmp/tmp.0odseBDfIo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.twEJ9n0KYN ++ cat /tmp/tmp.0odseBDfIo ++ rm /tmp/tmp.twEJ9n0KYN /tmp/tmp.0odseBDfIo ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + log 'running backups' + set +o xtrace [2025-10-18T12:47:14+0000] running backups + backup_name_minio=backup-minio-arbiter-nv + run_backup minio backup-minio-arbiter-nv + local storage=minio + local backup_name=backup-minio-arbiter-nv + local base=true + local backup_type=incremental + [[ true == \t\r\u\e ]] + backup_type=incremental-base + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/backup.yml + /usr/sbin/sed -e 's/name:/name: backup-minio-arbiter-nv/' + /usr/sbin/sed -e 's/storageName:/storageName: minio/' + yq '.spec.type="incremental-base"' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.7o7Xbou0Uf ++ mktemp + local LAST_ERR=/tmp/tmp.bio5iNfIu2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7o7Xbou0Uf perconaservermongodbbackup.psmdb.percona.com/backup-minio-arbiter-nv created + cat /tmp/tmp.bio5iNfIu2 + rm /tmp/tmp.7o7Xbou0Uf /tmp/tmp.bio5iNfIu2 + return 0 + wait_backup backup-minio-arbiter-nv + local backup_name=backup-minio-arbiter-nv + local target_state=ready + set +o xtrace waiting for backup-minio-arbiter-nv to reach ready state......OK + check_backup_in_storage backup-minio-arbiter-nv minio rs0 + local backup=backup-minio-arbiter-nv + local storage_type=minio + local replset=rs0 + local file=filelist.pbm + local endpoint + case ${storage_type} in + endpoint=minio-service ++ get_backup_dest backup-minio-arbiter-nv ++ local backup_name=backup-minio-arbiter-nv ++ /usr/sbin/sed 's|https://engk8soperators.blob.core.windows.net/||' ++ kubectl_bin get psmdb-backup backup-minio-arbiter-nv -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ sed 's|azure://||' +++ mktemp ++ sed 's|gs://||' ++ local LAST_OUT=/tmp/tmp.e92TVnDRTh +++ mktemp ++ local LAST_ERR=/tmp/tmp.Oo9RiGnT5N ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup backup-minio-arbiter-nv -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.e92TVnDRTh ++ cat /tmp/tmp.Oo9RiGnT5N ++ rm /tmp/tmp.e92TVnDRTh /tmp/tmp.Oo9RiGnT5N ++ return 0 + backup_dest=operator-testing/2025-10-18T12:47:16Z + [[ minio == \m\i\n\i\o ]] + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli 
--restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2025-10-18T12:47:16Z/rs0/filelist.pbm + grep filelist.pbm ++ mktemp + local LAST_OUT=/tmp/tmp.enhSco5ent ++ mktemp + local LAST_ERR=/tmp/tmp.klApyFnL6k + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2025-10-18T12:47:16Z/rs0/filelist.pbm + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.enhSco5ent + cat /tmp/tmp.klApyFnL6k All commands and output from this session will be recorded in container logs, including credentials and sensitive information passed through the command prompt. If you don't see a command prompt, try pressing enter. warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: Internal error occurred: error attaching to container: failed to load task: no running task found: task ca643f2218c9e81cfe8fc7d3be6584dbffe0b01bedb396542deb927439058b10 not found: not found + rm /tmp/tmp.enhSco5ent /tmp/tmp.klApyFnL6k + return 0 2025-10-18 12:47:24 13846 filelist.pbm + run_restore backup-minio-arbiter-nv + local backup_name=backup-minio-arbiter-nv + log 'drop collection' + set +o xtrace [2025-10-18T12:47:36+0000] drop collection + run_mongo 'use myApp\n db.test.drop()' myApp:myPass@some-name-rs0.demand-backup-incremental-31953 + local 'command=use myApp\n db.test.drop()' + local uri=myApp:myPass@some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Zf1tPWivuN +++ mktemp ++ local LAST_ERR=/tmp/tmp.rdx2aV4R1p ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Zf1tPWivuN ++ cat /tmp/tmp.rdx2aV4R1p ++ rm /tmp/tmp.Zf1tPWivuN /tmp/tmp.rdx2aV4R1p ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.0aaWl9YRBo ++ mktemp + local LAST_ERR=/tmp/tmp.aKmuPbblhG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0aaWl9YRBo Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-3.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-arbiter-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-nv-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("c471c24c-422b-4a5c-bb93-be9469ab866a") } Percona Server for MongoDB server version: v7.0.24-13 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.aKmuPbblhG + rm /tmp/tmp.0aaWl9YRBo /tmp/tmp.aKmuPbblhG + return 0 + log 'check backup and restore -- backup-minio-arbiter-nv' + set +o xtrace [2025-10-18T12:47:38+0000] check backup and restore -- backup-minio-arbiter-nv + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/conf/restore.yml + /usr/sbin/sed -e 's/name:/name: restore-backup-minio-arbiter-nv/' + /usr/sbin/sed -e 's/backupName:/backupName: backup-minio-arbiter-nv/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.R7PLRV2m6Q ++ mktemp + local LAST_ERR=/tmp/tmp.qPNbDHHlrv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.R7PLRV2m6Q perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-arbiter-nv created + cat /tmp/tmp.qPNbDHHlrv + rm /tmp/tmp.R7PLRV2m6Q /tmp/tmp.qPNbDHHlrv + return 0 + run_recovery_check backup-minio-arbiter-nv _restore-arbiter-nv + local backup_name=backup-minio-arbiter-nv + local compare_suffix=_restore-arbiter-nv + local base=true + wait_restore backup-minio-arbiter-nv some-name requested 0 3000 + local backup_name=backup-minio-arbiter-nv + local cluster_name=some-name + local target_state=requested + local wait_cluster_consistency=0 + local wait_time=3000 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-arbiter-nv object to be created.OK Waiting psmdb-restore/restore-backup-minio-arbiter-nv to reach state "requested" ....OK after 3 minutes + [[ 0 -eq 1 ]] + echo + compare_kubectl statefulset/some-name-rs0 _restore-arbiter-nv + local resource=statefulset/some-name-rs0 + local postfix=_restore-arbiter-nv + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore-arbiter-nv.yml + local new_result=/tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore-arbiter-nv-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("demand-backup-incremental-31953", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.NQpoHJYy9v ++ mktemp + local LAST_ERR=/tmp/tmp.3D81HKhP5y + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.NQpoHJYy9v + cat /tmp/tmp.3D81HKhP5y + rm /tmp/tmp.NQpoHJYy9v /tmp/tmp.3D81HKhP5y + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore-arbiter-nv.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/statefulset_some-name-rs0_restore-arbiter-nv.yml /tmp/tmp.yrUrhm9oZh/statefulset_some-name-rs0.yml + log 'compare_kubectl: statefulset/some-name-rs0 OK' + set +o xtrace [2025-10-18T12:51:20+0000] compare_kubectl: statefulset/some-name-rs0 OK + wait_restore backup-minio-arbiter-nv some-name ready 0 1800 + local backup_name=backup-minio-arbiter-nv + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=0 + local wait_time=1800 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-arbiter-nv object to be created.OK Waiting psmdb-restore/restore-backup-minio-arbiter-nv to reach state "ready" ..OK after 1 minutes + [[ 0 -eq 1 ]] ++ kubectl_bin get psmdb some-name -o yaml ++ yq '.metadata.annotations."percona.com/resync-pbm"' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5us2lUTAqx +++ mktemp ++ local LAST_ERR=/tmp/tmp.XDuIAl9VDJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o yaml ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5us2lUTAqx ++ cat /tmp/tmp.XDuIAl9VDJ ++ rm /tmp/tmp.5us2lUTAqx /tmp/tmp.XDuIAl9VDJ ++ return 0 + '[' true == null ']' + echo + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bDxDT0R48i +++ mktemp ++ local LAST_ERR=/tmp/tmp.OTxX52au7M ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bDxDT0R48i ++ cat /tmp/tmp.OTxX52au7M ++ rm /tmp/tmp.bDxDT0R48i /tmp/tmp.OTxX52au7M ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.q0YvBrx4g1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.IWJyPCRJEv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.q0YvBrx4g1 ++ cat /tmp/tmp.IWJyPCRJEv ++ rm /tmp/tmp.q0YvBrx4g1 /tmp/tmp.IWJyPCRJEv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eQH90XZUgx +++ mktemp ++ local LAST_ERR=/tmp/tmp.fdT5s2eM4Y ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eQH90XZUgx ++ cat /tmp/tmp.fdT5s2eM4Y ++ rm /tmp/tmp.eQH90XZUgx /tmp/tmp.fdT5s2eM4Y ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vKCaPkdBNu +++ mktemp ++ local LAST_ERR=/tmp/tmp.qcv5lt5qvZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vKCaPkdBNu ++ cat /tmp/tmp.qcv5lt5qvZ ++ rm /tmp/tmp.vKCaPkdBNu /tmp/tmp.qcv5lt5qvZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hmh0vH4IzV +++ mktemp ++ local LAST_ERR=/tmp/tmp.ocFRKpoV44 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hmh0vH4IzV ++ cat /tmp/tmp.ocFRKpoV44 ++ rm /tmp/tmp.hmh0vH4IzV /tmp/tmp.ocFRKpoV44 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eGT0jJwGXX +++ mktemp ++ local LAST_ERR=/tmp/tmp.AzHXBMJPSi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eGT0jJwGXX ++ cat /tmp/tmp.AzHXBMJPSi ++ rm /tmp/tmp.eGT0jJwGXX /tmp/tmp.AzHXBMJPSi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EOvvpdPFgn +++ mktemp ++ local LAST_ERR=/tmp/tmp.KlSLvaxjcD ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EOvvpdPFgn ++ cat /tmp/tmp.KlSLvaxjcD ++ rm /tmp/tmp.EOvvpdPFgn /tmp/tmp.KlSLvaxjcD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
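Note on reading the trace: entries like [[ initializing == \r\e\a\d\y ]] look garbled but are normal xtrace output. Inside [[ ]], the right-hand side of == is a glob pattern, and set -x prints it with every character backslash-escaped; the script source is a plain comparison:

# Source vs. trace under set -x:
state=initializing
set -x
[[ $state == ready ]]   # printed as: + [[ initializing == \r\e\a\d\y ]]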
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SkKKrmt1Tl +++ mktemp ++ local LAST_ERR=/tmp/tmp.iB6MTepPcS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SkKKrmt1Tl ++ cat /tmp/tmp.iB6MTepPcS ++ rm /tmp/tmp.SkKKrmt1Tl /tmp/tmp.iB6MTepPcS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IS545k58t7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.LC0LR03EqQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IS545k58t7 ++ cat /tmp/tmp.LC0LR03EqQ ++ rm /tmp/tmp.IS545k58t7 /tmp/tmp.LC0LR03EqQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 9 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UW21dAhzGc +++ mktemp ++ local LAST_ERR=/tmp/tmp.r7Xg4NVmOH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UW21dAhzGc ++ cat /tmp/tmp.r7Xg4NVmOH ++ rm /tmp/tmp.UW21dAhzGc /tmp/tmp.r7Xg4NVmOH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 10 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hREWZoZX1d +++ mktemp ++ local LAST_ERR=/tmp/tmp.XUGthWYZd9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hREWZoZX1d ++ cat /tmp/tmp.XUGthWYZd9 ++ rm /tmp/tmp.hREWZoZX1d /tmp/tmp.XUGthWYZd9 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo .OK .OK + wait_for_pbm_operations some-name + local cluster=some-name + set +o xtrace waiting for PBM operation to finish....OK + [[ true == true ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:55:19+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 
's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.hmxfeHcqor +++ mktemp ++ local LAST_ERR=/tmp/tmp.nzHJ04YAYA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hmxfeHcqor ++ cat /tmp/tmp.nzHJ04YAYA ++ rm /tmp/tmp.hmxfeHcqor /tmp/tmp.nzHJ04YAYA ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.m7B2rVLdAh ++ mktemp + local LAST_ERR=/tmp/tmp.Cf3OWPmvOl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.m7B2rVLdAh + cat /tmp/tmp.Cf3OWPmvOl + rm /tmp/tmp.m7B2rVLdAh /tmp/tmp.Cf3OWPmvOl + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:55:22+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.WgzmLobJd9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PLCQIMVhCR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WgzmLobJd9 ++ cat /tmp/tmp.PLCQIMVhCR ++ rm 
/tmp/tmp.WgzmLobJd9 /tmp/tmp.PLCQIMVhCR ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.LbBDk0SA8p ++ mktemp + local LAST_ERR=/tmp/tmp.IVeatnaOil + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LbBDk0SA8p + cat /tmp/tmp.IVeatnaOil + rm /tmp/tmp.LbBDk0SA8p /tmp/tmp.IVeatnaOil + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local tls=false + local 'full_command=db.test.find()' + [[ -n '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-10-18T12:55:25+0000] running db.test.find() in myApp + [[ false == \t\r\u\e ]] + mongo_command=run_mongo + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|Started a new thread for the timer service' + /usr/sbin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp egrep: warning: egrep is obsolescent; using grep -E ++ local LAST_OUT=/tmp/tmp.amAFAMxRQz +++ mktemp ++ local LAST_ERR=/tmp/tmp.mRsAKoVQYR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.amAFAMxRQz ++ cat /tmp/tmp.mRsAKoVQYR ++ rm /tmp/tmp.amAFAMxRQz /tmp/tmp.mRsAKoVQYR ++ return 0 + local client_container=psmdb-client-8f86f7874-85dcj + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.ttt4DPGvzt ++ mktemp + local LAST_ERR=/tmp/tmp.tngNMJhYRz + local 
exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-8f86f7874-85dcj -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-incremental-31953.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ttt4DPGvzt + cat /tmp/tmp.tngNMJhYRz + rm /tmp/tmp.ttt4DPGvzt /tmp/tmp.tngNMJhYRz + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/e2e-tests/demand-backup-incremental/compare/find.json /tmp/tmp.yrUrhm9oZh/find + destroy demand-backup-incremental-31953 + local namespace=demand-backup-incremental-31953 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_backups + desc 'Delete psmdb-backup' + set +o xtrace ----------------------------------------------------------------------------------- Delete psmdb-backup ----------------------------------------------------------------------------------- ++ kubectl_bin get psmdb-backup --no-headers ++ wc -l +++ mktemp ++ local LAST_OUT=/tmp/tmp.D23HhdjxqN +++ mktemp ++ local LAST_ERR=/tmp/tmp.z7PtQRJ8QT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup --no-headers ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.D23HhdjxqN ++ cat /tmp/tmp.z7PtQRJ8QT ++ rm /tmp/tmp.D23HhdjxqN /tmp/tmp.z7PtQRJ8QT ++ return 0 + '[' 6 '!=' 0 ']' + kubectl_bin get psmdb-backup ++ mktemp + local LAST_OUT=/tmp/tmp.St5lDkCUE8 ++ mktemp + local LAST_ERR=/tmp/tmp.Z1aItdfRpy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get psmdb-backup + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.St5lDkCUE8 NAME CLUSTER STORAGE DESTINATION TYPE SIZE STATUS COMPLETED AGE backup-aws-s3 some-name aws-s3 s3://operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:14:55Z incremental-base 682.35KB ready 40m 40m backup-azure-blob some-name azure-blob azure://operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:15:51Z incremental-base 801.97KB ready 39m 40m backup-gcp-cs some-name gcp-cs s3://operator-testing/psmdb-demand-backup-incremental/2025-10-18T12:15:28Z incremental-base 960.51KB ready 39m 40m backup-minio some-name minio s3://operator-testing/2025-10-18T12:16:21Z incremental-base 903.13KB ready 38m 39m backup-minio-arbiter-nv some-name minio s3://operator-testing/2025-10-18T12:47:16Z incremental-base 1.01MB ready 8m4s 8m13s backup-minio-not-base some-name minio s3://operator-testing/2025-10-18T12:16:56Z incremental 950.30KB ready 38m 38m + cat /tmp/tmp.Z1aItdfRpy + rm /tmp/tmp.St5lDkCUE8 /tmp/tmp.Z1aItdfRpy + return 0 + kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.eLSFZXUMsh ++ mktemp + local LAST_ERR=/tmp/tmp.wMvZ1ltAoh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eLSFZXUMsh perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted from demand-backup-incremental-31953 namespace 
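Note: the teardown that follows depends on clearing finalizers first; a psmdb-backup object still carrying a finalizer would block kubectl wait --for=delete crd indefinitely. The pattern, as the suite itself runs it:

# For every backup object (NAMESPACE NAME rows), reset finalizers to [] so CRD
# deletion cannot hang on stuck objects.
kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide \
  | grep -v NAMESPACE \
  | xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'

When the listing is empty, xargs still runs the template once with no arguments, which is why the log below shows a spurious "kubectl patch ... -n sh" failing harmlessly ($0 defaults to "sh" under sh -c); GNU xargs' -r (--no-run-if-empty) would suppress that extra invocation.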
+ kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.eLSFZXUMsh ++ mktemp + local LAST_ERR=/tmp/tmp.wMvZ1ltAoh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eLSFZXUMsh perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted from demand-backup-incremental-31953 namespace perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted from demand-backup-incremental-31953 namespace perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs" deleted from demand-backup-incremental-31953 namespace perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted from demand-backup-incremental-31953 namespace perconaservermongodbbackup.psmdb.percona.com "backup-minio-arbiter-nv" deleted from demand-backup-incremental-31953 namespace perconaservermongodbbackup.psmdb.percona.com "backup-minio-not-base" deleted from demand-backup-incremental-31953 namespace + cat /tmp/tmp.wMvZ1ltAoh + rm /tmp/tmp.eLSFZXUMsh /tmp/tmp.wMvZ1ltAoh + return 0 + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.lX7OZaAYdl ++ mktemp + local LAST_ERR=/tmp/tmp.otvZ7TH0vY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lX7OZaAYdl customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.otvZ7TH0vY + rm /tmp/tmp.lX7OZaAYdl /tmp/tmp.otvZ7TH0vY + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.DotzdWNA5o ++ mktemp + local LAST_ERR=/tmp/tmp.bpAJtogfMN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DotzdWNA5o + cat /tmp/tmp.bpAJtogfMN + rm /tmp/tmp.DotzdWNA5o /tmp/tmp.bpAJtogfMN + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type
"perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.4OV8EceAtH ++ mktemp + local LAST_ERR=/tmp/tmp.Aor4w0HTVx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4OV8EceAtH + cat /tmp/tmp.Aor4w0HTVx + rm /tmp/tmp.4OV8EceAtH /tmp/tmp.Aor4w0HTVx + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' No resources found + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: resource(s) were provided, but no name was specified + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.oVoCfzQOaP ++ mktemp + local LAST_ERR=/tmp/tmp.gofJydQTA9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oVoCfzQOaP + cat /tmp/tmp.gofJydQTA9 + rm /tmp/tmp.oVoCfzQOaP /tmp/tmp.gofJydQTA9 + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.DwOMXvRf3c ++ mktemp + local LAST_ERR=/tmp/tmp.hAoIFmeleT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2092/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DwOMXvRf3c clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.hAoIFmeleT + rm /tmp/tmp.DwOMXvRf3c /tmp/tmp.hAoIFmeleT + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.zi9QbM2O8t ++ mktemp + local LAST_ERR=/tmp/tmp.1cJZJUFaWV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.zi9QbM2O8t + cat /tmp/tmp.1cJZJUFaWV Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server 
(NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.zi9QbM2O8t 
+ cat /tmp/tmp.1cJZJUFaWV [same NotFound errors as on the first attempt above] + sleep 4
+ for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.18.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.zi9QbM2O8t + cat /tmp/tmp.1cJZJUFaWV [same NotFound errors as on the first attempt above] + sleep 8
+ cat /tmp/tmp.zi9QbM2O8t + cat /tmp/tmp.1cJZJUFaWV [same NotFound errors as on the first attempt above] + rm /tmp/tmp.zi9QbM2O8t /tmp/tmp.1cJZJUFaWV + return 1 + true + '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace demand-backup-incremental-31953 + rm -rf /tmp/tmp.yrUrhm9oZh + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.y2i1bHOfn3 + local LAST_OUT=/tmp/tmp.323ryochCR ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.Qs8iqe8la3 + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.5NotimLlMS + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator + for i in $(seq 0 2) + set +e + kubectl delete --grace-period=0 --force=true namespace demand-backup-incremental-31953