Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/logs/multi-storage.log WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 + cluster=some-name + create_infra multi-storage-23296 + local ns=multi-storage-23296 + [[ 1 == 1 ]] + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.SJb3DcILp0 ++ mktemp + local LAST_ERR=/tmp/tmp.q8A4INUAct + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SJb3DcILp0 customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.q8A4INUAct + rm /tmp/tmp.SJb3DcILp0 /tmp/tmp.q8A4INUAct + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE No resources found + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: resource(s) were provided, but no name was specified + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.MuXZMwrV9e ++ mktemp + local LAST_ERR=/tmp/tmp.OmUPAvNYw5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MuXZMwrV9e + cat /tmp/tmp.OmUPAvNYw5 + rm /tmp/tmp.MuXZMwrV9e /tmp/tmp.OmUPAvNYw5 + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd 
perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.4B6IBS4DWx ++ mktemp + local LAST_ERR=/tmp/tmp.WnbiNidBGr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4B6IBS4DWx + cat /tmp/tmp.WnbiNidBGr + rm /tmp/tmp.4B6IBS4DWx /tmp/tmp.WnbiNidBGr + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.kan7dM71pi ++ mktemp + local LAST_ERR=/tmp/tmp.2TmGJJQCuf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kan7dM71pi + cat /tmp/tmp.2TmGJJQCuf + rm /tmp/tmp.kan7dM71pi /tmp/tmp.2TmGJJQCuf + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.ZP2VdJI1Gv ++ mktemp + local LAST_ERR=/tmp/tmp.4hkIYnrJ0e + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ZP2VdJI1Gv clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.4hkIYnrJ0e + rm /tmp/tmp.ZP2VdJI1Gv /tmp/tmp.4hkIYnrJ0e + return 0 + check_crd_for_deletion PR-1926-fd8c7411 + local git_tag=PR-1926-fd8c7411 ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1926-fd8c7411/deploy/crd.yaml ++ yq eval .metadata.name ++ /usr/bin/sed s/---//g ++ /usr/bin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in '$(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '\''.metadata.name'\'' | $sed '\''s/---//g'\'' | $sed '\'':a;N;$!ba;s/\n/ /g'\'')' ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aZp8igk5uK +++ mktemp ++ local LAST_ERR=/tmp/tmp.4rBS8OkSCv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.aZp8igk5uK ++ cat /tmp/tmp.4rBS8OkSCv Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ 
exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.aZp8igk5uK ++ cat /tmp/tmp.4rBS8OkSCv Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.aZp8igk5uK ++ cat /tmp/tmp.4rBS8OkSCv Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.aZp8igk5uK ++ cat /tmp/tmp.4rBS8OkSCv Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.aZp8igk5uK /tmp/tmp.4rBS8OkSCv ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// ++ tail -n1 + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.9lZbFC1d7a + local LAST_OUT=/tmp/tmp.7S1JSQKgdN ++ mktemp + xargs kubectl delete ns ++ mktemp + local LAST_ERR=/tmp/tmp.UvkpSAOPfm + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.4yiq1w6aTj + local exit_status=0 + local timeout=4 ++ seq 0 
2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7S1JSQKgdN + cat /tmp/tmp.4yiq1w6aTj + rm /tmp/tmp.7S1JSQKgdN /tmp/tmp.4yiq1w6aTj + return 0 namespace "gke-managed-cim" deleted namespace "gke-managed-system" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted namespace "multi-storage-28430" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9lZbFC1d7a namespace "psmdb-operator" deleted + cat /tmp/tmp.UvkpSAOPfm + rm /tmp/tmp.9lZbFC1d7a /tmp/tmp.UvkpSAOPfm + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.7rAIsOd3Wk ++ mktemp + local LAST_ERR=/tmp/tmp.7GrrjdSeQt + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7rAIsOd3Wk + cat /tmp/tmp.7GrrjdSeQt + rm /tmp/tmp.7rAIsOd3Wk /tmp/tmp.7GrrjdSeQt + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.DpRZUihA0R ++ mktemp + local LAST_ERR=/tmp/tmp.T2c9OO5p9a + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DpRZUihA0R namespace/psmdb-operator created + cat /tmp/tmp.T2c9OO5p9a + rm /tmp/tmp.DpRZUihA0R /tmp/tmp.T2c9OO5p9a + return 0 + set_kube_ctx psmdb-operator + local namespace=psmdb-operator ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.2JIlr2I5cf +++ mktemp ++ local LAST_ERR=/tmp/tmp.pZd0pQcJT8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2JIlr2I5cf ++ cat /tmp/tmp.pZd0pQcJT8 ++ rm /tmp/tmp.2JIlr2I5cf /tmp/tmp.pZd0pQcJT8 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1926-fd8c7411-27-cluster1 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.RkcVmRRMgJ ++ mktemp + local LAST_ERR=/tmp/tmp.Cl7gwni9tE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1926-fd8c7411-27-cluster1 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RkcVmRRMgJ Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1926-fd8c7411-27-cluster1" modified. 
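-----------------------------------------------------------------------------------
editor's note: the kubectl_bin retry wrapper (reconstructed sketch)
-----------------------------------------------------------------------------------
The pattern that dominates this trace (mktemp'd LAST_OUT/LAST_ERR files, a `seq 0 2` retry loop, sleeps of 0/4/8 seconds between failed attempts, then cat and rm of the temp files) is the suite's kubectl_bin helper. A minimal sketch of what that helper plausibly looks like, reconstructed from the xtrace output alone; the real function lives in the e2e-tests sources and may differ in detail:

# Reconstructed from the trace above, not copied from the test suite.
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 timeout=4 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            cat "$LAST_OUT"
            cat "$LAST_ERR"
            sleep $((timeout * i)) # the trace shows sleep 0, 4, 8 between attempts
            continue
        fi
        break
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm -f "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}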
+ cat /tmp/tmp.Cl7gwni9tE + rm /tmp/tmp.RkcVmRRMgJ /tmp/tmp.Cl7gwni9tE + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/multi-storage/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.1o6R1Kcx21 ++ mktemp + local LAST_ERR=/tmp/tmp.X91OTpQs1l + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1o6R1Kcx21 customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.X91OTpQs1l + rm /tmp/tmp.1o6R1Kcx21 /tmp/tmp.X91OTpQs1l + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + sed -e 's^namespace: .*^namespace: psmdb-operator^' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/deploy/cw-rbac.yaml + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.r8yNVnQl6H ++ mktemp + local LAST_ERR=/tmp/tmp.cPgAsp6MVq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.r8yNVnQl6H clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.cPgAsp6MVq + rm /tmp/tmp.r8yNVnQl6H /tmp/tmp.cPgAsp6MVq + return 0 + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1926-fd8c7411") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/deploy/cw-operator.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.xMPQoMHJxg ++ mktemp + local LAST_ERR=/tmp/tmp.7KO8kTokjh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xMPQoMHJxg deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.7KO8kTokjh + rm /tmp/tmp.xMPQoMHJxg /tmp/tmp.7KO8kTokjh + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.cp18BDxuzi +++ mktemp ++ local LAST_ERR=/tmp/tmp.G30dDTwHUB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cp18BDxuzi ++ cat /tmp/tmp.G30dDTwHUB ++ rm /tmp/tmp.cp18BDxuzi /tmp/tmp.G30dDTwHUB ++ return 0 + wait_pod percona-server-mongodb-operator-86f87f4c64-t56hz + local pod=percona-server-mongodb-operator-86f87f4c64-t56hz + set +o xtrace waiting for pod/percona-server-mongodb-operator-86f87f4c64-t56hz to be ready.OK + echo 'Print operator info from log' Print operator info from log + grep 'Manager starting up' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.Q70tEZ1Q0w +++ mktemp ++ local LAST_ERR=/tmp/tmp.RXX8rwZI8C ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Q70tEZ1Q0w ++ cat /tmp/tmp.RXX8rwZI8C ++ rm /tmp/tmp.Q70tEZ1Q0w /tmp/tmp.RXX8rwZI8C ++ return 0 + kubectl_bin logs percona-server-mongodb-operator-86f87f4c64-t56hz ++ mktemp + local LAST_OUT=/tmp/tmp.KgxN0g2EOw ++ mktemp + local LAST_ERR=/tmp/tmp.RlAxIeu4X4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs percona-server-mongodb-operator-86f87f4c64-t56hz + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KgxN0g2EOw + cat /tmp/tmp.RlAxIeu4X4 + rm /tmp/tmp.KgxN0g2EOw /tmp/tmp.RlAxIeu4X4 + return 0 2025-05-29T13:38:03.312Z INFO setup Manager starting up {"gitCommit": "fd8c741112ea964fbf13aa1c4ec3d69c8b2447e5", "gitBranch": "PR-1926-fd8c7411", "buildTime": "", "goVersion": "go1.24.3", "os": "linux", "arch": "amd64"} + create_namespace multi-storage-23296 + local namespace=multi-storage-23296 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ sed s/NAMESPACE// ++ tail -n1 ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ awk '{print $1}' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: 
resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + awk '{print$1}' + kubectl_bin get ns + '[' -n '' ']' + desc 'cleaned up old namespaces multi-storage-23296' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces multi-storage-23296 ----------------------------------------------------------------------------------- + xargs kubectl delete ns + kubectl_bin delete namespace multi-storage-23296 --ignore-not-found ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.iaikhu3XW3 ++ mktemp + local LAST_OUT=/tmp/tmp.DyA5aYpB7E ++ mktemp + local LAST_ERR=/tmp/tmp.OBsYix0Bwo + local exit_status=0 + local timeout=4 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.tXShXGUTQ3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace multi-storage-23296 --ignore-not-found + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.iaikhu3XW3 + cat /tmp/tmp.OBsYix0Bwo + rm /tmp/tmp.iaikhu3XW3 /tmp/tmp.OBsYix0Bwo + return 0 + kubectl_bin wait --for=delete namespace multi-storage-23296 ++ mktemp + local LAST_OUT=/tmp/tmp.8AGB3S7I5f ++ mktemp + local LAST_ERR=/tmp/tmp.ENDzJKn6ba + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace multi-storage-23296 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DyA5aYpB7E + cat /tmp/tmp.tXShXGUTQ3 + rm /tmp/tmp.DyA5aYpB7E /tmp/tmp.tXShXGUTQ3 + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8AGB3S7I5f + cat /tmp/tmp.ENDzJKn6ba + rm /tmp/tmp.8AGB3S7I5f /tmp/tmp.ENDzJKn6ba + return 0 + desc 'create namespace multi-storage-23296' + set +o xtrace ----------------------------------------------------------------------------------- create namespace multi-storage-23296 ----------------------------------------------------------------------------------- + kubectl_bin create namespace multi-storage-23296 ++ mktemp + local LAST_OUT=/tmp/tmp.XiARFkaqsJ ++ mktemp + local LAST_ERR=/tmp/tmp.GBRyyvQodV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set 
+e + kubectl create namespace multi-storage-23296 namespace "gke-managed-cim" deleted namespace "gke-managed-system" deleted namespace "gmp-public" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XiARFkaqsJ namespace/multi-storage-23296 created + cat /tmp/tmp.GBRyyvQodV + rm /tmp/tmp.XiARFkaqsJ /tmp/tmp.GBRyyvQodV + return 0 + set_kube_ctx multi-storage-23296 + local namespace=multi-storage-23296 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.vPqz3VBUtr +++ mktemp ++ local LAST_ERR=/tmp/tmp.HbaZXSMz8G ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vPqz3VBUtr ++ cat /tmp/tmp.HbaZXSMz8G ++ rm /tmp/tmp.vPqz3VBUtr /tmp/tmp.HbaZXSMz8G ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1926-fd8c7411-27-cluster1 --namespace=multi-storage-23296 ++ mktemp + local LAST_OUT=/tmp/tmp.UZdJSmVZnf ++ mktemp + local LAST_ERR=/tmp/tmp.1D6FotKPRT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1926-fd8c7411-27-cluster1 --namespace=multi-storage-23296 namespace "gmp-system" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UZdJSmVZnf Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1926-fd8c7411-27-cluster1" modified. + cat /tmp/tmp.1D6FotKPRT + rm /tmp/tmp.UZdJSmVZnf /tmp/tmp.1D6FotKPRT + return 0 + deploy_minio + desc 'install Minio' + set +o xtrace ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- + helm uninstall minio-service Error: uninstall: Release not loaded: minio-service: release: not found + : + helm repo remove minio "minio" has been removed from your repositories + helm repo add minio https://charts.min.io/ "minio" has been added to your repositories + retry 10 60 helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio + local max=10 + local delay=60 + shift 2 + local n=1 + helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio NAME: minio-service LAST DEPLOYED: Thu May 29 13:38:26 2025 NAMESPACE: multi-storage-23296 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.multi-storage-23296.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace multi-storage-23296 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. 
kubectl port-forward $POD_NAME 9000 --namespace multi-storage-23296 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace multi-storage-23296 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace multi-storage-23296 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. mc ls minio-service-local ++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IPyE6CM2RS +++ mktemp ++ local LAST_ERR=/tmp/tmp.6TlKRqzHHh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IPyE6CM2RS ++ cat /tmp/tmp.6TlKRqzHHh ++ rm /tmp/tmp.IPyE6CM2RS /tmp/tmp.6TlKRqzHHh ++ return 0 + MINIO_POD=minio-service-86dfccd949-cvbmd + wait_pod minio-service-86dfccd949-cvbmd + local pod=minio-service-86dfccd949-cvbmd + set +o xtrace waiting for pod/minio-service-86dfccd949-cvbmd to be ready.OK + '[' -n psmdb-operator ']' + kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.multi-storage-23296.svc.cluster.local --tcp=9000 ++ mktemp + local LAST_OUT=/tmp/tmp.n0bU1005Mn ++ mktemp + local LAST_ERR=/tmp/tmp.pcFoyWzGbL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create svc -n psmdb-operator externalname minio-service --external-name=minio-service.multi-storage-23296.svc.cluster.local --tcp=9000 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.n0bU1005Mn service/minio-service created + cat /tmp/tmp.pcFoyWzGbL + rm /tmp/tmp.n0bU1005Mn /tmp/tmp.pcFoyWzGbL + return 0 + create_minio_bucket operator-testing + local bucket=operator-testing + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' ++ mktemp + local LAST_OUT=/tmp/tmp.3hGpPQaGnP ++ mktemp + local LAST_ERR=/tmp/tmp.eeBYnIEuBy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3hGpPQaGnP make_bucket: operator-testing pod "aws-cli" deleted + cat /tmp/tmp.eeBYnIEuBy If you don't see a command prompt, try pressing enter. 
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: unable to upgrade connection: container aws-cli not found in pod aws-cli_multi-storage-23296 + rm /tmp/tmp.3hGpPQaGnP /tmp/tmp.eeBYnIEuBy + return 0 + create_minio_bucket operator-testing-2 + local bucket=operator-testing-2 + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing-2' ++ mktemp + local LAST_OUT=/tmp/tmp.3enZ4qCWQH ++ mktemp + local LAST_ERR=/tmp/tmp.B9d4BiXWTP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing-2' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3enZ4qCWQH make_bucket: operator-testing-2 pod "aws-cli" deleted + cat /tmp/tmp.B9d4BiXWTP + rm /tmp/tmp.3enZ4qCWQH /tmp/tmp.B9d4BiXWTP + return 0 + create_minio_bucket operator-testing-3 + local bucket=operator-testing-3 + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing-3' ++ mktemp + local LAST_OUT=/tmp/tmp.H1F3MTfbE2 ++ mktemp + local LAST_ERR=/tmp/tmp.8dZxmxPcRd + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing-3' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.H1F3MTfbE2 make_bucket: operator-testing-3 pod "aws-cli" deleted + cat /tmp/tmp.8dZxmxPcRd If you don't see a command prompt, try pressing enter. 
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: unable to upgrade connection: container aws-cli not found in pod aws-cli_multi-storage-23296 + rm /tmp/tmp.H1F3MTfbE2 /tmp/tmp.8dZxmxPcRd + return 0 + log 'create secrets and start client' + set +o xtrace [2025-05-29T13:39:11+0000] create secrets and start client + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/conf/minio-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.yVjPVGpLWg ++ mktemp + local LAST_ERR=/tmp/tmp.yK8ZRPzPz3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/conf/minio-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yVjPVGpLWg secret/some-users created deployment.apps/psmdb-client created secret/minio-secret created + cat /tmp/tmp.yK8ZRPzPz3 + rm /tmp/tmp.yVjPVGpLWg /tmp/tmp.yK8ZRPzPz3 + return 0 + log 'create PSMDB cluster: some-name' + set +o xtrace [2025-05-29T13:39:13+0000] create PSMDB cluster: some-name + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/multi-storage/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/multi-storage/conf/some-name.yml + kubectl_bin apply -f - ++ mktemp + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1926-fd8c7411"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' + local LAST_OUT=/tmp/tmp.RuMbZZdehd + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '.spec.upgradeOptions.apply="Never"' ++ mktemp + local LAST_ERR=/tmp/tmp.z36HDtetaX + local exit_status=0 + local timeout=4 + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/multi-storage/conf/some-name.yml ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RuMbZZdehd perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.z36HDtetaX + rm /tmp/tmp.RuMbZZdehd /tmp/tmp.z36HDtetaX + return 0 + log 'check if all 3 Pods started' + set +o xtrace [2025-05-29T13:39:17+0000] check if all 3 Pods started + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready......OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.....OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ 
local LAST_OUT=/tmp/tmp.pWlR2G1qiB +++ mktemp ++ local LAST_ERR=/tmp/tmp.4KHXjUMnKI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pWlR2G1qiB ++ cat /tmp/tmp.4KHXjUMnKI ++ rm /tmp/tmp.pWlR2G1qiB /tmp/tmp.4KHXjUMnKI ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.....OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tWUD1P0Mkv +++ mktemp ++ local LAST_ERR=/tmp/tmp.hMeO74ej4d ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.tWUD1P0Mkv ++ cat /tmp/tmp.hMeO74ej4d ++ rm /tmp/tmp.tWUD1P0Mkv /tmp/tmp.hMeO74ej4d ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9L9ygoAGKD +++ mktemp ++ local LAST_ERR=/tmp/tmp.9sNplBVZlF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9L9ygoAGKD ++ cat /tmp/tmp.9sNplBVZlF ++ rm /tmp/tmp.9L9ygoAGKD /tmp/tmp.9sNplBVZlF ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + wait_backup_agent some-name-rs0-0 + local agent_pod=some-name-rs0-0 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-0...2025-05-29T13:40:37.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-1 + local agent_pod=some-name-rs0-1 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-1...2025-05-29T13:40:36.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-2 + local agent_pod=some-name-rs0-2 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-2...2025-05-29T13:40:42.000+0000 I listening for the commands + create_user + log 'create user myApp' + set +o xtrace [2025-05-29T13:40:44+0000] create user myApp + run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-rs0.multi-storage-23296 mongodb + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.s8yyQAjWPK +++ mktemp ++ local LAST_ERR=/tmp/tmp.rBW23xWDbd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.s8yyQAjWPK ++ cat /tmp/tmp.rBW23xWDbd ++ rm /tmp/tmp.s8yyQAjWPK /tmp/tmp.rBW23xWDbd ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ 
userAdmin:userAdmin123456@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.A3lXQBAFER ++ mktemp + local LAST_ERR=/tmp/tmp.4Rawe90AW1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.A3lXQBAFER Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0.multi-storage-23296.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("54802a62-3ef8-4804-a61f-54c5acbb8f08") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.4Rawe90AW1 + rm /tmp/tmp.A3lXQBAFER /tmp/tmp.4Rawe90AW1 + return 0 + write_document 100500 + local x=100500 + local cmp_postfix= + log 'write document: 100500' + set +o xtrace [2025-05-29T13:40:47+0000] write document: 100500 + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RrcHTutooC +++ mktemp ++ local LAST_ERR=/tmp/tmp.XCzWot4qAV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RrcHTutooC ++ cat /tmp/tmp.XCzWot4qAV ++ rm /tmp/tmp.RrcHTutooC /tmp/tmp.XCzWot4qAV ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.ezamYcqbc0 ++ mktemp + local LAST_ERR=/tmp/tmp.GNf4ZjckUi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ezamYcqbc0 Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0.multi-storage-23296.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("777305e5-6198-4666-8bc3-c10fc9e386b8") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.GNf4ZjckUi + rm /tmp/tmp.ezamYcqbc0 /tmp/tmp.GNf4ZjckUi + return 0 + run_backup minio-1 backup-minio-1 logical + local storage=minio-1 + local name=backup-minio-1 + local type=logical + log 'creating backup: backup-minio-1 (logical)' + set +o xtrace [2025-05-29T13:40:49+0000] creating backup: backup-minio-1 (logical) + yq eval '.metadata.name = "backup-minio-1" | .spec.storageName = "minio-1" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/multi-storage/conf/backup.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.l8S2hClr30 ++ mktemp + local LAST_ERR=/tmp/tmp.nTGI4KLelD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.l8S2hClr30 perconaservermongodbbackup.psmdb.percona.com/backup-minio-1 created + cat /tmp/tmp.nTGI4KLelD + rm /tmp/tmp.l8S2hClr30 /tmp/tmp.nTGI4KLelD + return 0 + wait_backup backup-minio-1 + local backup_name=backup-minio-1 + local target_state=ready + set +o xtrace waiting for backup-minio-1 to reach ready state..... + run_backup minio-2 backup-minio-2 logical + local storage=minio-2 + local name=backup-minio-2 + local type=logical + log 'creating backup: backup-minio-2 (logical)' + set +o xtrace [2025-05-29T13:41:04+0000] creating backup: backup-minio-2 (logical) + yq eval '.metadata.name = "backup-minio-2" | .spec.storageName = "minio-2" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/multi-storage/conf/backup.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.tkZdtPVgBm ++ mktemp + local LAST_ERR=/tmp/tmp.YDJEPQInrQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tkZdtPVgBm perconaservermongodbbackup.psmdb.percona.com/backup-minio-2 created + cat /tmp/tmp.YDJEPQInrQ + rm /tmp/tmp.tkZdtPVgBm /tmp/tmp.YDJEPQInrQ + return 0 + wait_backup backup-minio-2 + local backup_name=backup-minio-2 + local target_state=ready + set +o xtrace waiting for backup-minio-2 to reach ready state..... 
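-----------------------------------------------------------------------------------
editor's note: run_backup / wait_backup (reconstructed sketch)
-----------------------------------------------------------------------------------
Each backup above is created by templating one shared manifest with yq and applying it, then polling the CR until it reports ready. Only the yq expression, the manifest path, and the kubectl commands are visible in this log; the helper bodies and the poll interval below are assumptions:

# Path taken verbatim from the trace; helper bodies are a hedged reconstruction.
test_dir=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/multi-storage

run_backup() {
    local storage=$1 name=$2 type=$3
    yq eval ".metadata.name = \"${name}\" |
             .spec.storageName = \"${storage}\" |
             .spec.type = \"${type}\"" "${test_dir}/conf/backup.yml" \
        | kubectl apply -f -
}

wait_backup() {
    local backup_name=$1 target_state=${2:-ready}
    echo -n "waiting for ${backup_name} to reach ${target_state} state"
    until [ "$(kubectl get psmdb-backup "${backup_name}" \
            -o jsonpath='{.status.state}')" = "${target_state}" ]; do
        echo -n .
        sleep 3 # assumed poll interval; not visible in the log
    done
    echo
}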
+ run_backup minio-3 backup-minio-3 physical + local storage=minio-3 + local name=backup-minio-3 + local type=physical + log 'creating backup: backup-minio-3 (physical)' + set +o xtrace [2025-05-29T13:41:20+0000] creating backup: backup-minio-3 (physical) + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-minio-3" | .spec.storageName = "minio-3" | .spec.type = "physical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/multi-storage/conf/backup.yml ++ mktemp + local LAST_OUT=/tmp/tmp.nraf4JJdcT ++ mktemp + local LAST_ERR=/tmp/tmp.S72mllTOlr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nraf4JJdcT perconaservermongodbbackup.psmdb.percona.com/backup-minio-3 created + cat /tmp/tmp.S72mllTOlr + rm /tmp/tmp.nraf4JJdcT /tmp/tmp.S72mllTOlr + return 0 + wait_backup backup-minio-3 + local backup_name=backup-minio-3 + local target_state=ready + set +o xtrace waiting for backup-minio-3 to reach ready state.... + write_document 100501 + local x=100501 + local cmp_postfix= + log 'write document: 100501' + set +o xtrace [2025-05-29T13:41:35+0000] write document: 100501 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mXDpRCk8Mt +++ mktemp ++ local LAST_ERR=/tmp/tmp.cXEEqfTfP8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mXDpRCk8Mt ++ cat /tmp/tmp.cXEEqfTfP8 ++ rm /tmp/tmp.mXDpRCk8Mt /tmp/tmp.cXEEqfTfP8 ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Bhdslp2KHe ++ mktemp + local LAST_ERR=/tmp/tmp.Ez69HsmCQO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Bhdslp2KHe Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0.multi-storage-23296.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("f9b7ac34-b159-499b-9409-d87f3d4aaa5b") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.Ez69HsmCQO + rm /tmp/tmp.Bhdslp2KHe /tmp/tmp.Ez69HsmCQO + return 0 + write_document 100502 + local x=100502 + local cmp_postfix= + 
log 'write document: 100502' + set +o xtrace [2025-05-29T13:41:38+0000] write document: 100502 + run_mongo 'use myApp\n db.test.insert({ x: 100502 })' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb + local 'command=use myApp\n db.test.insert({ x: 100502 })' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.au073AjcVz +++ mktemp ++ local LAST_ERR=/tmp/tmp.It5A3OIEWq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.au073AjcVz ++ cat /tmp/tmp.It5A3OIEWq ++ rm /tmp/tmp.au073AjcVz /tmp/tmp.It5A3OIEWq ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100502 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Jrrx0ncT95 ++ mktemp + local LAST_ERR=/tmp/tmp.Zpj7bdbg81 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100502 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Jrrx0ncT95 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0.multi-storage-23296.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("b8746202-2d5f-4ea1-89f8-ead38958318c") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.Zpj7bdbg81 + rm /tmp/tmp.Jrrx0ncT95 /tmp/tmp.Zpj7bdbg81 + return 0 + write_document 100503 + local x=100503 + local cmp_postfix= + log 'write document: 100503' + set +o xtrace [2025-05-29T13:41:43+0000] write document: 100503 + run_mongo 'use myApp\n db.test.insert({ x: 100503 })' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb + local 'command=use myApp\n db.test.insert({ x: 100503 })' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VtiV7PLgz4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.E2VzTYmvAl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VtiV7PLgz4 ++ cat /tmp/tmp.E2VzTYmvAl ++ rm /tmp/tmp.VtiV7PLgz4 /tmp/tmp.E2VzTYmvAl ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- 
bash -c 'printf '\''use myApp\n db.test.insert({ x: 100503 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.zSSpxj9TGB ++ mktemp + local LAST_ERR=/tmp/tmp.Tp8LbBxFpO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100503 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zSSpxj9TGB Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0.multi-storage-23296.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("ca6eadce-89a4-47d6-a596-c5190f387940") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.Tp8LbBxFpO + rm /tmp/tmp.zSSpxj9TGB /tmp/tmp.Tp8LbBxFpO + return 0 + write_document 100504 + local x=100504 + local cmp_postfix= + log 'write document: 100504' + set +o xtrace [2025-05-29T13:41:47+0000] write document: 100504 + run_mongo 'use myApp\n db.test.insert({ x: 100504 })' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb + local 'command=use myApp\n db.test.insert({ x: 100504 })' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.424g05SaE7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ErE2T0Q5lI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.424g05SaE7 ++ cat /tmp/tmp.ErE2T0Q5lI ++ rm /tmp/tmp.424g05SaE7 /tmp/tmp.ErE2T0Q5lI ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100504 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.8vLQ7TAcfP ++ mktemp + local LAST_ERR=/tmp/tmp.imFR1Fk6Gf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100504 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8vLQ7TAcfP Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0.multi-storage-23296.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("0326af88-aa8b-4052-ab49-1fa2e529cb3c") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye 
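-----------------------------------------------------------------------------------
editor's note: write_document (reconstructed sketch)
-----------------------------------------------------------------------------------
Every write above resolves the psmdb-client pod by label, then pipes a short mongo-shell script into it over kubectl exec. A minimal sketch of that path, reconstructed from the trace (the credentials and host are the test's own throwaway values; the helper body is an assumption):

write_document() {
    local x=$1
    local uri="myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local"
    local pod
    # Find the client pod the suite deployed from e2e-tests/conf/client.yml.
    pod=$(kubectl get pods --selector=name=psmdb-client \
        -o 'jsonpath={.items[].metadata.name}')
    kubectl exec "$pod" -- bash -c \
        "printf 'use myApp\n db.test.insert({ x: ${x} })\n' \
         | mongo \"mongodb://${uri}/admin?ssl=false&replicaSet=rs0\""
}

Called as write_document 100500 and so on, this yields the WriteResult({ "nInserted" : 1 }) acknowledgements seen throughout the trace.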
+ cat /tmp/tmp.imFR1Fk6Gf + rm /tmp/tmp.8vLQ7TAcfP /tmp/tmp.imFR1Fk6Gf + return 0 + write_document 100505 + local x=100505 + local cmp_postfix= + log 'write document: 100505' + set +o xtrace [2025-05-29T13:41:52+0000] write document: 100505 + run_mongo 'use myApp\n db.test.insert({ x: 100505 })' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb + local 'command=use myApp\n db.test.insert({ x: 100505 })' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6K0b7uhoW0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Fa3BVBxKo7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6K0b7uhoW0 ++ cat /tmp/tmp.Fa3BVBxKo7 ++ rm /tmp/tmp.6K0b7uhoW0 /tmp/tmp.Fa3BVBxKo7 ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100505 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.duuDjxFCwn ++ mktemp + local LAST_ERR=/tmp/tmp.2uLv3L2YuL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100505 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.duuDjxFCwn Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0.multi-storage-23296.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("4032a436-b48d-459f-acdd-4d2a9f1abf2d") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.2uLv3L2YuL + rm /tmp/tmp.duuDjxFCwn /tmp/tmp.2uLv3L2YuL + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.multi-storage-23296 + local command=find + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-29T13:41:55+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb '' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LMjvlqFc4t +++ mktemp ++ local LAST_ERR=/tmp/tmp.0Q2wW9LmhY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LMjvlqFc4t ++ cat /tmp/tmp.0Q2wW9LmhY ++ rm /tmp/tmp.LMjvlqFc4t /tmp/tmp.0Q2wW9LmhY ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.hHoRh1avAb ++ mktemp + local LAST_ERR=/tmp/tmp.LKsjsPrHbY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hHoRh1avAb + cat /tmp/tmp.LKsjsPrHbY + rm /tmp/tmp.hHoRh1avAb /tmp/tmp.LKsjsPrHbY + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/multi-storage/compare/find.json /tmp/tmp.I7LLtUMviw/find + wait_for_restorable_time backup-minio-3 + local backup_name=backup-minio-3 ++ get_latest_restorable_time backup-minio-3 ++ local backup_name=backup-minio-3 +++ kubectl_bin get psmdb-backup backup-minio-3 -o yaml +++ yq .status.latestRestorableTime ++++ mktemp +++ local LAST_OUT=/tmp/tmp.L0zmSIz0NY ++++ mktemp +++ local LAST_ERR=/tmp/tmp.RNXnRgY2j3 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb-backup backup-minio-3 -o yaml +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.L0zmSIz0NY +++ cat /tmp/tmp.RNXnRgY2j3 +++ rm /tmp/tmp.L0zmSIz0NY /tmp/tmp.RNXnRgY2j3 +++ return 0 ++ local time=2025-05-29T13:41:25Z ++ [[ 2025-05-29T13:41:25Z == \n\u\l\l ]] ++ echo -n 2025-05-29T13:41:25Z + local latest_restorable_time=2025-05-29T13:41:25Z + local retries=0 + echo -n 'waiting for psmdb-backup/backup-minio-3'\''s latest restorable time' waiting for psmdb-backup/backup-minio-3's latest restorable time+ [[ 2025-05-29T13:41:25Z != \n\u\l\l ]] + echo '.OK latestRestorableTime: 2025-05-29T13:41:25Z' .OK latestRestorableTime: 2025-05-29T13:41:25Z + wait_for_advanced_restorable_time backup-minio-3 + local backup_name=backup-minio-3 +++ 
get_latest_restorable_time backup-minio-3 +++ local backup_name=backup-minio-3 ++++ yq .status.latestRestorableTime ++++ kubectl_bin get psmdb-backup backup-minio-3 -o yaml +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.aVr2nXwmYC +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.Ig9LvRyTic ++++ local exit_status=0 ++++ local timeout=4 +++++ seq 0 2 ++++ for i in '$(seq 0 2)' ++++ set +e ++++ kubectl get psmdb-backup backup-minio-3 -o yaml ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 -a -n 1 ']' ++++ break ++++ cat /tmp/tmp.aVr2nXwmYC ++++ cat /tmp/tmp.Ig9LvRyTic ++++ rm /tmp/tmp.aVr2nXwmYC /tmp/tmp.Ig9LvRyTic ++++ return 0 +++ local time=2025-05-29T13:41:25Z +++ [[ 2025-05-29T13:41:25Z == \n\u\l\l ]] +++ echo -n 2025-05-29T13:41:25Z ++ datetime_to_timestamp 2025-05-29T13:41:25Z ++ local datetime=2025-05-29T13:41:25Z ++ TZ=UTC ++ /usr/bin/date -d2025-05-29T13:41:25Z +%s + local latest_restorable_time=1748526085 ++ get_backup_last_write backup-minio-3 ++ local backup_name=backup-minio-3 +++ kubectl_bin get psmdb-backup backup-minio-3 -o yaml ++++ mktemp +++ local LAST_OUT=/tmp/tmp.AJY1Og9xkO ++++ mktemp +++ local LAST_ERR=/tmp/tmp.pnLmJ0W88w +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb-backup backup-minio-3 -o yaml +++ yq .status.pbmName +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.AJY1Og9xkO +++ cat /tmp/tmp.pnLmJ0W88w +++ rm /tmp/tmp.AJY1Og9xkO /tmp/tmp.pnLmJ0W88w +++ return 0 ++ local pbm_name=2025-05-29T13:41:24Z ++ jq --arg v 2025-05-29T13:41:24Z '.snapshots[] | select(.name==$v) | .restoreTo' ++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm list -o json +++ mktemp ++ local LAST_OUT=/tmp/tmp.ft6KjbOSFh +++ mktemp ++ local LAST_ERR=/tmp/tmp.3T6T3OBAl0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm list -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ft6KjbOSFh ++ cat /tmp/tmp.3T6T3OBAl0 ++ rm /tmp/tmp.ft6KjbOSFh /tmp/tmp.3T6T3OBAl0 ++ return 0 + local latest_write=1748526086 + local retries=0 ++ timestamp_to_datetime 1748526086 ++ local ts=1748526086 ++ TZ=UTC ++ /usr/bin/date -d@1748526086 +%Y-%m-%dT%H:%M:%S%Z + echo -n 'waiting for psmdb-backup/backup-minio-3'\''s latest restorable time to advance its latest write (2025-05-29T13:41:26UTC)' waiting for psmdb-backup/backup-minio-3's latest restorable time to advance its latest write (2025-05-29T13:41:26UTC)+ [[ 1748526085 -gt 1748526086 ]] +++ get_latest_restorable_time backup-minio-3 +++ local backup_name=backup-minio-3 ++++ yq .status.latestRestorableTime ++++ kubectl_bin get psmdb-backup backup-minio-3 -o yaml +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.feMPMfzzcC +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.DGZA1TB5D0 ++++ local exit_status=0 ++++ local timeout=4 +++++ seq 0 2 ++++ for i in '$(seq 0 2)' ++++ set +e ++++ kubectl get psmdb-backup backup-minio-3 -o yaml ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 -a -n 1 ']' ++++ break ++++ cat /tmp/tmp.feMPMfzzcC ++++ cat /tmp/tmp.DGZA1TB5D0 ++++ rm /tmp/tmp.feMPMfzzcC /tmp/tmp.DGZA1TB5D0 ++++ return 0 +++ local time=2025-05-29T13:41:25Z +++ [[ 2025-05-29T13:41:25Z == \n\u\l\l ]] +++ echo -n 2025-05-29T13:41:25Z ++ datetime_to_timestamp 2025-05-29T13:41:25Z ++ local datetime=2025-05-29T13:41:25Z ++ TZ=UTC ++ /usr/bin/date -d2025-05-29T13:41:25Z +%s + latest_restorable_time=1748526085 + [[ 0 -gt 5 ]] + let retries+=1 + echo -n . 
.+ sleep 70 + [[ 1748526085 -gt 1748526086 ]] +++ get_latest_restorable_time backup-minio-3 +++ local backup_name=backup-minio-3 ++++ kubectl_bin get psmdb-backup backup-minio-3 -o yaml ++++ yq .status.latestRestorableTime +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.lMTbGsNFnx +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.EtQP4FFyCo ++++ local exit_status=0 ++++ local timeout=4 +++++ seq 0 2 ++++ for i in '$(seq 0 2)' ++++ set +e ++++ kubectl get psmdb-backup backup-minio-3 -o yaml ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 -a -n 1 ']' ++++ break ++++ cat /tmp/tmp.lMTbGsNFnx ++++ cat /tmp/tmp.EtQP4FFyCo ++++ rm /tmp/tmp.lMTbGsNFnx /tmp/tmp.EtQP4FFyCo ++++ return 0 +++ local time=2025-05-29T13:42:25Z +++ [[ 2025-05-29T13:42:25Z == \n\u\l\l ]] +++ echo -n 2025-05-29T13:42:25Z ++ datetime_to_timestamp 2025-05-29T13:42:25Z ++ local datetime=2025-05-29T13:42:25Z ++ TZ=UTC ++ /usr/bin/date -d2025-05-29T13:42:25Z +%s + latest_restorable_time=1748526145 + [[ 1 -gt 5 ]] + let retries+=1 + echo -n . .+ sleep 70 + [[ 1748526145 -gt 1748526086 ]] ++ timestamp_to_datetime 1748526145 ++ local ts=1748526145 ++ TZ=UTC ++ /usr/bin/date -d@1748526145 +%Y-%m-%dT%H:%M:%S%Z + echo '.OK latestRestorableTime: 2025-05-29T13:42:25UTC' .OK latestRestorableTime: 2025-05-29T13:42:25UTC ++ get_latest_restorable_time backup-minio-3 ++ local backup_name=backup-minio-3 +++ kubectl_bin get psmdb-backup backup-minio-3 -o yaml +++ yq .status.latestRestorableTime ++++ mktemp +++ local LAST_OUT=/tmp/tmp.cWYOckWHfV ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Z9qJCNzU25 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb-backup backup-minio-3 -o yaml +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.cWYOckWHfV +++ cat /tmp/tmp.Z9qJCNzU25 +++ rm /tmp/tmp.cWYOckWHfV /tmp/tmp.Z9qJCNzU25 +++ return 0 ++ local time=2025-05-29T13:44:25Z ++ [[ 2025-05-29T13:44:25Z == \n\u\l\l ]] ++ echo -n 2025-05-29T13:44:25Z + restore_time=2025-05-29T13:44:25Z ++ format_pitr_target 2025-05-29T13:44:25Z ++ local target=2025-05-29T13:44:25Z ++ echo 2025-05-29T13:44:25Z ++ sed 's/T/ /' ++ sed s/Z// + check_recovery backup-minio-3 '2025-05-29 13:44:25' + local backup_name=backup-minio-3 + local 'restore_time=2025-05-29 13:44:25' + local cmp_postfix= + drop_collection myApp test myApp myPass + local db=myApp + local collection=test + local user=myApp + local pass=myPass + log 'dropping collection: myApp.test' + set +o xtrace [2025-05-29T13:44:27+0000] dropping collection: myApp.test + run_mongo 'use myApp\n db.test.drop()' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb + local 'command=use myApp\n db.test.drop()' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HdsDlB4nka +++ mktemp ++ local LAST_ERR=/tmp/tmp.r3lB2fEo5b ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HdsDlB4nka ++ cat /tmp/tmp.r3lB2fEo5b ++ rm /tmp/tmp.HdsDlB4nka /tmp/tmp.r3lB2fEo5b ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec 
psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.oVKm5wtccU ++ mktemp + local LAST_ERR=/tmp/tmp.vsOqVl9e5W + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oVKm5wtccU Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0.multi-storage-23296.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("f2e8601f-cdba-44d2-b2a6-36a60bc34187") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.vsOqVl9e5W + rm /tmp/tmp.oVKm5wtccU /tmp/tmp.vsOqVl9e5W + return 0 + log 'creating restore: restore-backup-minio-3 restore_to: 2025-05-29 13:44:25' + set +o xtrace [2025-05-29T13:44:30+0000] creating restore: restore-backup-minio-3 restore_to: 2025-05-29 13:44:25 + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/multi-storage/conf/restore.yml + /usr/bin/sed -e 's/name:/name: restore-backup-minio-3/' + /usr/bin/sed -e 's/backupName:/backupName: backup-minio-3/' + /usr/bin/sed -e 's/date:/date: 2025-05-29 13:44:25/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.ROUXFADg9i ++ mktemp + local LAST_ERR=/tmp/tmp.G9pXFLMP5i + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ROUXFADg9i perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-3 created + cat /tmp/tmp.G9pXFLMP5i + rm /tmp/tmp.ROUXFADg9i /tmp/tmp.G9pXFLMP5i + return 0 + wait_restore backup-minio-3 some-name requested 0 900 1 + local backup_name=backup-minio-3 + local cluster_name=some-name + local target_state=requested + local wait_cluster_consistency=0 + local wait_time=900 + local ok_if_ready=1 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-3 object to be createdOK Waiting psmdb-restore/restore-backup-minio-3 to reach state "requested" ..OK + [[ 0 -eq 1 ]] + wait_restore backup-minio-3 some-name ready 0 1600 + local backup_name=backup-minio-3 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=0 + local wait_time=1600 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-3 object to be createdOK Waiting psmdb-restore/restore-backup-minio-3 to reach state "ready" ..OK + [[ 0 -eq 1 ]] + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 
-eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mJkuNVrg2a +++ mktemp ++ local LAST_ERR=/tmp/tmp.Lc3aJEYUOz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mJkuNVrg2a ++ cat /tmp/tmp.Lc3aJEYUOz ++ rm /tmp/tmp.mJkuNVrg2a /tmp/tmp.Lc3aJEYUOz ++ return 0 + [[ false == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qkc8QASiVA +++ mktemp ++ local LAST_ERR=/tmp/tmp.wVEAkwCAJc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Qkc8QASiVA ++ cat /tmp/tmp.wVEAkwCAJc ++ rm /tmp/tmp.Qkc8QASiVA /tmp/tmp.wVEAkwCAJc ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nCFrYcaKSI +++ mktemp ++ local LAST_ERR=/tmp/tmp.re8OrJIzw4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nCFrYcaKSI ++ cat /tmp/tmp.re8OrJIzw4 ++ rm /tmp/tmp.nCFrYcaKSI /tmp/tmp.re8OrJIzw4 ++ return 0 + [[ false == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + compare_mongo_cmd find myApp:myPass@some-name-rs0.multi-storage-23296 '' .svc.cluster.local myApp test 'sort( { x: 1 } )' + local command=find + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local 'sort=sort( { x: 1 } )' + local 'full_command=db.test.find()' + [[ ! 
-z sort( { x: 1 } ) ]] + full_command='db.test.find().sort( { x: 1 } )' + log 'running db.test.find().sort( { x: 1 } ) in myApp' + set +o xtrace [2025-05-29T13:50:06+0000] running db.test.find().sort( { x: 1 } ) in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find().sort( { x: 1 } )' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find().sort( { x: 1 } )' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.OkOchw04Ur +++ mktemp ++ local LAST_ERR=/tmp/tmp.kbIMutA0ub ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.OkOchw04Ur ++ cat /tmp/tmp.kbIMutA0ub ++ rm /tmp/tmp.OkOchw04Ur /tmp/tmp.kbIMutA0ub ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.find().sort( { x: 1 } )\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.rmofizog5L ++ mktemp + local LAST_ERR=/tmp/tmp.BJGhsuu3uD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.find().sort( { x: 1 } )\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rmofizog5L + cat /tmp/tmp.BJGhsuu3uD + rm /tmp/tmp.rmofizog5L /tmp/tmp.BJGhsuu3uD + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/multi-storage/compare/find.json /tmp/tmp.I7LLtUMviw/find + log 'sleeping for 60 seconds for resync to finish' + set +o xtrace [2025-05-29T13:50:09+0000] sleeping for 60 seconds for resync to finish + sleep 60 + log 'changing main storage from minio-1 to minio-2' + set +o xtrace [2025-05-29T13:51:09+0000] changing main storage from minio-1 to minio-2 + kubectl patch psmdb some-name --type=json '-p=[ {"op": "remove", "path": "/spec/backup/storages/minio-1/main"}, {"op": "add", "path": "/spec/backup/storages/minio-2/main", "value": true} ]' perconaservermongodb.psmdb.percona.com/some-name patched + log 'sleeping for 60 seconds for resync to finish' + set +o xtrace [2025-05-29T13:51:10+0000] sleeping for 60 seconds for resync to finish + sleep 60 + run_backup minio-2 backup-minio-2-1 logical + local storage=minio-2 + local name=backup-minio-2-1 + local type=logical + log 'creating backup: backup-minio-2-1 (logical)' + set +o xtrace [2025-05-29T13:52:10+0000] creating backup: backup-minio-2-1 (logical) + yq eval '.metadata.name = "backup-minio-2-1" | .spec.storageName = "minio-2" | .spec.type = "logical"' 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/multi-storage/conf/backup.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.dIjt7jAZeX ++ mktemp + local LAST_ERR=/tmp/tmp.5rhNba5uHB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dIjt7jAZeX perconaservermongodbbackup.psmdb.percona.com/backup-minio-2-1 created + cat /tmp/tmp.5rhNba5uHB + rm /tmp/tmp.dIjt7jAZeX /tmp/tmp.5rhNba5uHB + return 0 + wait_backup backup-minio-2-1 + local backup_name=backup-minio-2-1 + local target_state=ready + set +o xtrace waiting for backup-minio-2-1 to reach ready state..... + write_document 100506 + local x=100506 + local cmp_postfix= + log 'write document: 100506' + set +o xtrace [2025-05-29T13:52:23+0000] write document: 100506 + run_mongo 'use myApp\n db.test.insert({ x: 100506 })' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb + local 'command=use myApp\n db.test.insert({ x: 100506 })' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xKHSCRkAFi +++ mktemp ++ local LAST_ERR=/tmp/tmp.1FaAH45ZrW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xKHSCRkAFi ++ cat /tmp/tmp.1FaAH45ZrW ++ rm /tmp/tmp.xKHSCRkAFi /tmp/tmp.1FaAH45ZrW ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100506 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.5W3dS4WOmM ++ mktemp + local LAST_ERR=/tmp/tmp.hCuEKilpwG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100506 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5W3dS4WOmM Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0.multi-storage-23296.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("6c697213-99da-42b7-bcaf-8597a1ea0188") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.hCuEKilpwG + rm /tmp/tmp.5W3dS4WOmM /tmp/tmp.hCuEKilpwG + return 0 + write_document 100507 + local x=100507 + local cmp_postfix= + log 'write document: 100507' + set +o xtrace [2025-05-29T13:52:25+0000] write document: 100507 + run_mongo 'use myApp\n db.test.insert({ x: 100507 })' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb + local 'command=use myApp\n db.test.insert({ x: 100507 })' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + 
local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kndQOAkDl6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.zFBmVwSpKi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kndQOAkDl6 ++ cat /tmp/tmp.zFBmVwSpKi ++ rm /tmp/tmp.kndQOAkDl6 /tmp/tmp.zFBmVwSpKi ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100507 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Erv3QpxV0A ++ mktemp + local LAST_ERR=/tmp/tmp.5A2VNUQ9jx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100507 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Erv3QpxV0A Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0.multi-storage-23296.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("7f159bf2-1595-4ddd-ab22-941a5bfe07b3") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.5A2VNUQ9jx + rm /tmp/tmp.Erv3QpxV0A /tmp/tmp.5A2VNUQ9jx + return 0 + write_document 100508 + local x=100508 + local cmp_postfix= + log 'write document: 100508' + set +o xtrace [2025-05-29T13:52:28+0000] write document: 100508 + run_mongo 'use myApp\n db.test.insert({ x: 100508 })' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb + local 'command=use myApp\n db.test.insert({ x: 100508 })' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yhxZq6dj9q +++ mktemp ++ local LAST_ERR=/tmp/tmp.nwZH4qTDsE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yhxZq6dj9q ++ cat /tmp/tmp.nwZH4qTDsE ++ rm /tmp/tmp.yhxZq6dj9q /tmp/tmp.nwZH4qTDsE ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100508 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.rOfClvsKnK ++ mktemp + local LAST_ERR=/tmp/tmp.4ay8cJIrAw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' 
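# The kubectl_bin wrapper traced throughout this log retries each kubectl
# call up to three times ("seq 0 2"), toggling set +e/set -e around the call
# and capturing stdout/stderr into mktemp files. A minimal sketch of the
# pattern, reconstructed from the trace; the sleep between attempts is an
# assumption based on the "timeout=4" local, and no retry ever fires in this
# run because every call succeeds on the first attempt:
#
#   out=$(mktemp); err=$(mktemp)
#   for i in $(seq 0 2); do
#       set +e
#       kubectl "$@" >"$out" 2>"$err"
#       exit_status=$?
#       set -e
#       [ "$exit_status" -eq 0 ] && break
#       sleep 4   # assumed backoff; the trace only shows 'local timeout=4'
#   done
#   cat "$out" "$err"
#   rm "$out" "$err"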
+ set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100508 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rOfClvsKnK Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0.multi-storage-23296.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("c77678c9-33a0-41dd-941a-ade9016c5996") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.4ay8cJIrAw + rm /tmp/tmp.rOfClvsKnK /tmp/tmp.4ay8cJIrAw + return 0 + write_document 100509 + local x=100509 + local cmp_postfix= + log 'write document: 100509' + set +o xtrace [2025-05-29T13:52:32+0000] write document: 100509 + run_mongo 'use myApp\n db.test.insert({ x: 100509 })' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb + local 'command=use myApp\n db.test.insert({ x: 100509 })' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dcxLQz6pXC +++ mktemp ++ local LAST_ERR=/tmp/tmp.lNihT1d4ZE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dcxLQz6pXC ++ cat /tmp/tmp.lNihT1d4ZE ++ rm /tmp/tmp.dcxLQz6pXC /tmp/tmp.lNihT1d4ZE ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100509 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.lE2ivNET31 ++ mktemp + local LAST_ERR=/tmp/tmp.frpBhlqQQe + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100509 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lE2ivNET31 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0.multi-storage-23296.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("82900444-8d9c-4fb3-bf99-f517dabcc1f3") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.frpBhlqQQe + rm /tmp/tmp.lE2ivNET31 /tmp/tmp.frpBhlqQQe + return 0 + write_document 100510 + local x=100510 + local cmp_postfix= + log 'write document: 100510' + set +o xtrace [2025-05-29T13:52:34+0000] write document: 100510 + run_mongo 'use myApp\n db.test.insert({ x: 100510 })' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb + 
local 'command=use myApp\n db.test.insert({ x: 100510 })' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.frLRvEmq2y +++ mktemp ++ local LAST_ERR=/tmp/tmp.rpj3jPdqB2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.frLRvEmq2y ++ cat /tmp/tmp.rpj3jPdqB2 ++ rm /tmp/tmp.frLRvEmq2y /tmp/tmp.rpj3jPdqB2 ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100510 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.oprKKhrSdU ++ mktemp + local LAST_ERR=/tmp/tmp.JY0c1YNT5u + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100510 })\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oprKKhrSdU Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0.multi-storage-23296.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("08897085-068b-4784-9cf0-82a6f688bb32") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.JY0c1YNT5u + rm /tmp/tmp.oprKKhrSdU /tmp/tmp.JY0c1YNT5u + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.multi-storage-23296 -2nd .svc.cluster.local myApp test 'sort( { x: 1 } )' + local command=find + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local 'sort=sort( { x: 1 } )' + local 'full_command=db.test.find()' + [[ ! 
-z sort( { x: 1 } ) ]] + full_command='db.test.find().sort( { x: 1 } )' + log 'running db.test.find().sort( { x: 1 } ) in myApp' + set +o xtrace [2025-05-29T13:52:36+0000] running db.test.find().sort( { x: 1 } ) in myApp + run_mongo 'use myApp\n db.test.find().sort( { x: 1 } )' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find().sort( { x: 1 } )' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h3FdGYqnsq +++ mktemp ++ local LAST_ERR=/tmp/tmp.b6jRi736OK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.h3FdGYqnsq ++ cat /tmp/tmp.b6jRi736OK ++ rm /tmp/tmp.h3FdGYqnsq /tmp/tmp.b6jRi736OK ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.find().sort( { x: 1 } )\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.XB5yMK62xE ++ mktemp + local LAST_ERR=/tmp/tmp.h0isKRlua3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.find().sort( { x: 1 } )\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XB5yMK62xE + cat /tmp/tmp.h0isKRlua3 + rm /tmp/tmp.XB5yMK62xE /tmp/tmp.h0isKRlua3 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/multi-storage/compare/find-2nd.json /tmp/tmp.I7LLtUMviw/find-2nd + wait_for_restorable_time backup-minio-2-1 + local backup_name=backup-minio-2-1 ++ get_latest_restorable_time backup-minio-2-1 ++ local backup_name=backup-minio-2-1 +++ yq .status.latestRestorableTime +++ kubectl_bin get psmdb-backup backup-minio-2-1 -o yaml ++++ mktemp +++ local LAST_OUT=/tmp/tmp.UHUQD4Y1qr ++++ mktemp +++ local LAST_ERR=/tmp/tmp.IsLqjBnnEt +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb-backup backup-minio-2-1 -o yaml +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.UHUQD4Y1qr +++ cat /tmp/tmp.IsLqjBnnEt +++ rm /tmp/tmp.UHUQD4Y1qr /tmp/tmp.IsLqjBnnEt +++ return 0 ++ local time=2025-05-29T13:52:13Z ++ [[ 2025-05-29T13:52:13Z == \n\u\l\l ]] ++ echo -n 2025-05-29T13:52:13Z + local latest_restorable_time=2025-05-29T13:52:13Z + local retries=0 + echo -n 'waiting for psmdb-backup/backup-minio-2-1'\''s latest restorable time' waiting for psmdb-backup/backup-minio-2-1's latest restorable time+ [[ 2025-05-29T13:52:13Z != \n\u\l\l ]] + 
echo '.OK latestRestorableTime: 2025-05-29T13:52:13Z' .OK latestRestorableTime: 2025-05-29T13:52:13Z + wait_for_advanced_restorable_time backup-minio-2-1 + local backup_name=backup-minio-2-1 +++ get_latest_restorable_time backup-minio-2-1 +++ local backup_name=backup-minio-2-1 ++++ kubectl_bin get psmdb-backup backup-minio-2-1 -o yaml ++++ yq .status.latestRestorableTime +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.ZfcMlsCaZR +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.q00SYW1XLN ++++ local exit_status=0 ++++ local timeout=4 +++++ seq 0 2 ++++ for i in '$(seq 0 2)' ++++ set +e ++++ kubectl get psmdb-backup backup-minio-2-1 -o yaml ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 -a -n 1 ']' ++++ break ++++ cat /tmp/tmp.ZfcMlsCaZR ++++ cat /tmp/tmp.q00SYW1XLN ++++ rm /tmp/tmp.ZfcMlsCaZR /tmp/tmp.q00SYW1XLN ++++ return 0 +++ local time=2025-05-29T13:52:13Z +++ [[ 2025-05-29T13:52:13Z == \n\u\l\l ]] +++ echo -n 2025-05-29T13:52:13Z ++ datetime_to_timestamp 2025-05-29T13:52:13Z ++ local datetime=2025-05-29T13:52:13Z ++ TZ=UTC ++ /usr/bin/date -d2025-05-29T13:52:13Z +%s + local latest_restorable_time=1748526733 ++ get_backup_last_write backup-minio-2-1 ++ local backup_name=backup-minio-2-1 +++ kubectl_bin get psmdb-backup backup-minio-2-1 -o yaml +++ yq .status.pbmName ++++ mktemp +++ local LAST_OUT=/tmp/tmp.zvxKygr1LC ++++ mktemp +++ local LAST_ERR=/tmp/tmp.yef4JCpbcO +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb-backup backup-minio-2-1 -o yaml +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.zvxKygr1LC +++ cat /tmp/tmp.yef4JCpbcO +++ rm /tmp/tmp.zvxKygr1LC /tmp/tmp.yef4JCpbcO +++ return 0 ++ local pbm_name=2025-05-29T13:52:12Z ++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm list -o json ++ jq --arg v 2025-05-29T13:52:12Z '.snapshots[] | select(.name==$v) | .restoreTo' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rv0L18trFa +++ mktemp ++ local LAST_ERR=/tmp/tmp.S3kcKbYzeG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm list -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rv0L18trFa ++ cat /tmp/tmp.S3kcKbYzeG ++ rm /tmp/tmp.rv0L18trFa /tmp/tmp.S3kcKbYzeG ++ return 0 + local latest_write=1748526737 + local retries=0 ++ timestamp_to_datetime 1748526737 ++ local ts=1748526737 ++ TZ=UTC ++ /usr/bin/date -d@1748526737 +%Y-%m-%dT%H:%M:%S%Z + echo -n 'waiting for psmdb-backup/backup-minio-2-1'\''s latest restorable time to advance its latest write (2025-05-29T13:52:17UTC)' waiting for psmdb-backup/backup-minio-2-1's latest restorable time to advance its latest write (2025-05-29T13:52:17UTC)+ [[ 1748526733 -gt 1748526737 ]] +++ get_latest_restorable_time backup-minio-2-1 +++ local backup_name=backup-minio-2-1 ++++ yq .status.latestRestorableTime ++++ kubectl_bin get psmdb-backup backup-minio-2-1 -o yaml +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.zcsM1lnbLl +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.Qe76OcM4hm ++++ local exit_status=0 ++++ local timeout=4 +++++ seq 0 2 ++++ for i in '$(seq 0 2)' ++++ set +e ++++ kubectl get psmdb-backup backup-minio-2-1 -o yaml ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 -a -n 1 ']' ++++ break ++++ cat /tmp/tmp.zcsM1lnbLl ++++ cat /tmp/tmp.Qe76OcM4hm ++++ rm /tmp/tmp.zcsM1lnbLl /tmp/tmp.Qe76OcM4hm ++++ return 0 +++ local time=2025-05-29T13:52:13Z +++ [[ 2025-05-29T13:52:13Z == \n\u\l\l ]] +++ echo -n 2025-05-29T13:52:13Z ++ 
datetime_to_timestamp 2025-05-29T13:52:13Z ++ local datetime=2025-05-29T13:52:13Z ++ TZ=UTC ++ /usr/bin/date -d2025-05-29T13:52:13Z +%s + latest_restorable_time=1748526733 + [[ 0 -gt 5 ]] + let retries+=1 + echo -n . .+ sleep 70 + [[ 1748526733 -gt 1748526737 ]] +++ get_latest_restorable_time backup-minio-2-1 +++ local backup_name=backup-minio-2-1 ++++ kubectl_bin get psmdb-backup backup-minio-2-1 -o yaml ++++ yq .status.latestRestorableTime +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.hQSxxZ1nsc +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.yz8rl3Uf1d ++++ local exit_status=0 ++++ local timeout=4 +++++ seq 0 2 ++++ for i in '$(seq 0 2)' ++++ set +e ++++ kubectl get psmdb-backup backup-minio-2-1 -o yaml ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 -a -n 1 ']' ++++ break ++++ cat /tmp/tmp.hQSxxZ1nsc ++++ cat /tmp/tmp.yz8rl3Uf1d ++++ rm /tmp/tmp.hQSxxZ1nsc /tmp/tmp.yz8rl3Uf1d ++++ return 0 +++ local time=2025-05-29T13:53:41Z +++ [[ 2025-05-29T13:53:41Z == \n\u\l\l ]] +++ echo -n 2025-05-29T13:53:41Z ++ datetime_to_timestamp 2025-05-29T13:53:41Z ++ local datetime=2025-05-29T13:53:41Z ++ TZ=UTC ++ /usr/bin/date -d2025-05-29T13:53:41Z +%s + latest_restorable_time=1748526821 + [[ 1 -gt 5 ]] + let retries+=1 + echo -n . .+ sleep 70 + [[ 1748526821 -gt 1748526737 ]] ++ timestamp_to_datetime 1748526821 ++ local ts=1748526821 ++ TZ=UTC ++ /usr/bin/date -d@1748526821 +%Y-%m-%dT%H:%M:%S%Z + echo '.OK latestRestorableTime: 2025-05-29T13:53:41UTC' .OK latestRestorableTime: 2025-05-29T13:53:41UTC ++ get_latest_restorable_time backup-minio-2-1 ++ local backup_name=backup-minio-2-1 +++ kubectl_bin get psmdb-backup backup-minio-2-1 -o yaml +++ yq .status.latestRestorableTime ++++ mktemp +++ local LAST_OUT=/tmp/tmp.QQSVCXydpp ++++ mktemp +++ local LAST_ERR=/tmp/tmp.WikcpCJ8KT +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb-backup backup-minio-2-1 -o yaml +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.QQSVCXydpp +++ cat /tmp/tmp.WikcpCJ8KT +++ rm /tmp/tmp.QQSVCXydpp /tmp/tmp.WikcpCJ8KT +++ return 0 ++ local time=2025-05-29T13:54:41Z ++ [[ 2025-05-29T13:54:41Z == \n\u\l\l ]] ++ echo -n 2025-05-29T13:54:41Z + restore_time=2025-05-29T13:54:41Z ++ format_pitr_target 2025-05-29T13:54:41Z ++ local target=2025-05-29T13:54:41Z ++ sed s/Z// ++ echo 2025-05-29T13:54:41Z ++ sed 's/T/ /' + check_recovery backup-minio-2-1 '2025-05-29 13:54:41' -2nd + local backup_name=backup-minio-2-1 + local 'restore_time=2025-05-29 13:54:41' + local cmp_postfix=-2nd + drop_collection myApp test myApp myPass + local db=myApp + local collection=test + local user=myApp + local pass=myPass + log 'dropping collection: myApp.test' + set +o xtrace [2025-05-29T13:55:05+0000] dropping collection: myApp.test + run_mongo 'use myApp\n db.test.drop()' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb + local 'command=use myApp\n db.test.drop()' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mwe0uNUegK +++ mktemp ++ local LAST_ERR=/tmp/tmp.WZ08lkWl7u ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mwe0uNUegK ++ cat /tmp/tmp.WZ08lkWl7u ++ rm 
/tmp/tmp.mwe0uNUegK /tmp/tmp.WZ08lkWl7u ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Vx9XdDzIsS ++ mktemp + local LAST_ERR=/tmp/tmp.LLslSUTCZa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.drop()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Vx9XdDzIsS Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0.multi-storage-23296.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("41066fd8-c602-4b50-94cf-39cd72f355c8") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.LLslSUTCZa + rm /tmp/tmp.Vx9XdDzIsS /tmp/tmp.LLslSUTCZa + return 0 + log 'creating restore: restore-backup-minio-2-1 restore_to: 2025-05-29 13:54:41' + set +o xtrace [2025-05-29T13:55:09+0000] creating restore: restore-backup-minio-2-1 restore_to: 2025-05-29 13:54:41 + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/multi-storage/conf/restore.yml + /usr/bin/sed -e 's/name:/name: restore-backup-minio-2-1/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.kVHSqmwrVs ++ mktemp + local LAST_ERR=/tmp/tmp.rDiMori8hD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's/backupName:/backupName: backup-minio-2-1/' + /usr/bin/sed -e 's/date:/date: 2025-05-29 13:54:41/' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kVHSqmwrVs perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-2-1 created + cat /tmp/tmp.rDiMori8hD + rm /tmp/tmp.kVHSqmwrVs /tmp/tmp.rDiMori8hD + return 0 + wait_restore backup-minio-2-1 some-name requested 0 900 1 + local backup_name=backup-minio-2-1 + local cluster_name=some-name + local target_state=requested + local wait_cluster_consistency=0 + local wait_time=900 + local ok_if_ready=1 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-2-1 object to be createdOK Waiting psmdb-restore/restore-backup-minio-2-1 to reach state "requested" OK + [[ 0 -eq 1 ]] + wait_restore backup-minio-2-1 some-name ready 0 1600 + local backup_name=backup-minio-2-1 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=0 + local wait_time=1600 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-2-1 object to be createdOK Waiting psmdb-restore/restore-backup-minio-2-1 to reach state "ready" OK + [[ 0 -eq 1 ]] + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace 
waiting for pod/some-name-rs0-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W1Y0o7VN23 +++ mktemp ++ local LAST_ERR=/tmp/tmp.joD8uwSMZI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.W1Y0o7VN23 ++ cat /tmp/tmp.joD8uwSMZI ++ rm /tmp/tmp.W1Y0o7VN23 /tmp/tmp.joD8uwSMZI ++ return 0 + [[ false == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WUnIkf3wky +++ mktemp ++ local LAST_ERR=/tmp/tmp.vxr55PMpdB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WUnIkf3wky ++ cat /tmp/tmp.vxr55PMpdB ++ rm /tmp/tmp.WUnIkf3wky /tmp/tmp.vxr55PMpdB ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.za7hSNdTJk +++ mktemp ++ local LAST_ERR=/tmp/tmp.rAGko0BdvW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.za7hSNdTJk ++ cat /tmp/tmp.rAGko0BdvW ++ rm /tmp/tmp.za7hSNdTJk /tmp/tmp.rAGko0BdvW ++ return 0 + [[ false == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + compare_mongo_cmd find myApp:myPass@some-name-rs0.multi-storage-23296 -2nd .svc.cluster.local myApp test 'sort( { x: 1 } )' + local command=find + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local 'sort=sort( { x: 1 } )' + local 'full_command=db.test.find()' + [[ ! 
-z sort( { x: 1 } ) ]] + full_command='db.test.find().sort( { x: 1 } )' + log 'running db.test.find().sort( { x: 1 } ) in myApp' + set +o xtrace [2025-05-29T13:55:57+0000] running db.test.find().sort( { x: 1 } ) in myApp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find().sort( { x: 1 } )' myApp:myPass@some-name-rs0.multi-storage-23296 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find().sort( { x: 1 } )' + local uri=myApp:myPass@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.THSNKwNVLw +++ mktemp ++ local LAST_ERR=/tmp/tmp.DqXSarQcyF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.THSNKwNVLw ++ cat /tmp/tmp.DqXSarQcyF ++ rm /tmp/tmp.THSNKwNVLw /tmp/tmp.DqXSarQcyF ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.find().sort( { x: 1 } )\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.efgeOfKBM6 ++ mktemp + local LAST_ERR=/tmp/tmp.WzStFJub8m + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use myApp\n db.test.find().sort( { x: 1 } )\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.efgeOfKBM6 + cat /tmp/tmp.WzStFJub8m + rm /tmp/tmp.efgeOfKBM6 /tmp/tmp.WzStFJub8m + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/e2e-tests/multi-storage/compare/find-2nd.json /tmp/tmp.I7LLtUMviw/find-2nd + log 'dropping pbmBackups collection and starting resync' + set +o xtrace [2025-05-29T13:56:00+0000] dropping pbmBackups collection and starting resync + drop_metadata_and_resync ++ kubectl get psmdb-backup --no-headers ++ wc -l + local psmdb_backup_count=4 + log 'there are psmdb-backups in multi-storage-23296' + set +o xtrace [2025-05-29T13:56:01+0000] there are psmdb-backups in multi-storage-23296 + drop_collection admin pbmBackups backup backup123456 + local db=admin + local collection=pbmBackups + local user=backup + local pass=backup123456 + log 'dropping collection: admin.pbmBackups' + set +o xtrace [2025-05-29T13:56:01+0000] dropping collection: admin.pbmBackups + run_mongo 'use admin\n db.pbmBackups.drop()' backup:backup123456@some-name-rs0.multi-storage-23296 mongodb + local 'command=use admin\n db.pbmBackups.drop()' + local uri=backup:backup123456@some-name-rs0.multi-storage-23296 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp 
++ local LAST_OUT=/tmp/tmp.qoZFDfxbxw +++ mktemp ++ local LAST_ERR=/tmp/tmp.iIQNOTZ3lu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.qoZFDfxbxw ++ cat /tmp/tmp.iIQNOTZ3lu ++ rm /tmp/tmp.qoZFDfxbxw /tmp/tmp.iIQNOTZ3lu ++ return 0 + local client_container=psmdb-client-66f577db5f-qxsc6 + local mongo_flag= + [[ backup:backup123456@some-name-rs0.multi-storage-23296 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use admin\n db.pbmBackups.drop()\n'\'' | mongo mongodb://backup:backup123456@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.c4F7MEF50O ++ mktemp + local LAST_ERR=/tmp/tmp.6pjg5Wnk7S + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-qxsc6 -- bash -c 'printf '\''use admin\n db.pbmBackups.drop()\n'\'' | mongo mongodb://backup:backup123456@some-name-rs0.multi-storage-23296.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.c4F7MEF50O Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0.multi-storage-23296.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("3e46a6da-cdca-43d5-a5a2-ec5ce8c4217a") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db admin true bye + cat /tmp/tmp.6pjg5Wnk7S + rm /tmp/tmp.c4F7MEF50O /tmp/tmp.6pjg5Wnk7S + return 0 + kubectl_bin annotate psmdb some-name percona.com/resync-pbm=true ++ mktemp + local LAST_OUT=/tmp/tmp.kEiARMrw7K ++ mktemp + local LAST_ERR=/tmp/tmp.fJmP9IUbaE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl annotate psmdb some-name percona.com/resync-pbm=true + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kEiARMrw7K perconaservermongodb.psmdb.percona.com/some-name annotated + cat /tmp/tmp.fJmP9IUbaE + rm /tmp/tmp.kEiARMrw7K /tmp/tmp.fJmP9IUbaE + return 0 + sleep 10 + local retries=0 + echo -n 'waiting for resync to finish' waiting for resync to finish++ kubectl_bin get psmdb some-name -o yaml ++ yq '.metadata.annotations."percona.com/resync-in-progress"' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pteiBx0oCr +++ mktemp ++ local LAST_ERR=/tmp/tmp.wbbWvb0aTP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o yaml ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pteiBx0oCr ++ cat /tmp/tmp.wbbWvb0aTP ++ rm /tmp/tmp.pteiBx0oCr /tmp/tmp.wbbWvb0aTP ++ return 0 + local resync_running=null + [[ null == \n\u\l\l ]] + echo .OK .OK ++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm list -o json ++ jq '.snapshots | length' + local backup_count=4 + log 'there are 4 backups in pbmBackups collection after resync' + set +o xtrace [2025-05-29T13:56:19+0000] there are 4 backups in pbmBackups collection after resync ++ echo 4 ++ xargs ++ echo 4 ++ xargs + [[ 4 != 4 ]] + log 'resync finished successfully' + set +o xtrace [2025-05-29T13:56:19+0000] resync finished successfully + log 'checking backup deletion' + set +o xtrace 
[2025-05-29T13:56:19+0000] checking backup deletion + check_backup_deletion ++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json ++ jq '.backups.pitrChunks.pitrChunks[0].range.start' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rP49c4VJsV +++ mktemp ++ local LAST_ERR=/tmp/tmp.KImnQD2tCo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rP49c4VJsV ++ cat /tmp/tmp.KImnQD2tCo ++ rm /tmp/tmp.rP49c4VJsV /tmp/tmp.KImnQD2tCo ++ return 0 + local initial_chunk_start=1748526733 + log 'deleting psmdb-backup/backup-minio-3' + set +o xtrace [2025-05-29T13:56:21+0000] deleting psmdb-backup/backup-minio-3 + kubectl_bin delete psmdb-backup backup-minio-3 ++ mktemp + local LAST_OUT=/tmp/tmp.dCPp3ENjaS ++ mktemp + local LAST_ERR=/tmp/tmp.MAg1Rycn7e + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb-backup backup-minio-3 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dCPp3ENjaS perconaservermongodbbackup.psmdb.percona.com "backup-minio-3" deleted + cat /tmp/tmp.MAg1Rycn7e + rm /tmp/tmp.dCPp3ENjaS /tmp/tmp.MAg1Rycn7e + return 0 + log 'checking if chunks are deleted' + set +o xtrace [2025-05-29T13:56:22+0000] checking if chunks are deleted ++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json ++ jq '.backups.pitrChunks.pitrChunks[0].range.start' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KIxHYNI61e +++ mktemp ++ local LAST_ERR=/tmp/tmp.uiWyUj1Z5N ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.KIxHYNI61e ++ cat /tmp/tmp.uiWyUj1Z5N ++ rm /tmp/tmp.KIxHYNI61e /tmp/tmp.uiWyUj1Z5N ++ return 0 + local chunk_start=1748526733 + [[ 1748526733 != 1748526733 ]] + log 'chunks are not deleted, OK.' + set +o xtrace [2025-05-29T13:56:24+0000] chunks are not deleted, OK. 
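
# This deletion check and the one that follows pivot on a single value: the start timestamp of
# the oldest PITR chunk PBM reports. Deleting backup-minio-3 must leave it unchanged, because
# backup-minio-2-1 still anchors that part of the timeline; once backup-minio-2-1 is deleted too,
# the operator is expected to garbage-collect the orphaned chunks and the timestamp must advance.
# A sketch of the comparison; the chunk_start helper name is mine, and the waits the real test
# performs between steps are omitted.
chunk_start() {
    kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json \
        | jq '.backups.pitrChunks.pitrChunks[0].range.start'
}
initial=$(chunk_start)                        # 1748526733 in this run
kubectl delete psmdb-backup backup-minio-3
[[ $(chunk_start) == "$initial" ]] && echo "chunks are not deleted, OK."
kubectl delete psmdb-backup backup-minio-2-1
[[ $(chunk_start) != "$initial" ]] && echo "chunks are deleted, OK."   # 1748526821 here
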
+ log 'deleting psmdb-backup/backup-minio-2-1' + set +o xtrace [2025-05-29T13:56:24+0000] deleting psmdb-backup/backup-minio-2-1 + kubectl_bin delete psmdb-backup backup-minio-2-1 ++ mktemp + local LAST_OUT=/tmp/tmp.m3RA5q1t8e ++ mktemp + local LAST_ERR=/tmp/tmp.ZKwSipa4F7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb-backup backup-minio-2-1 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.m3RA5q1t8e perconaservermongodbbackup.psmdb.percona.com "backup-minio-2-1" deleted + cat /tmp/tmp.ZKwSipa4F7 + rm /tmp/tmp.m3RA5q1t8e /tmp/tmp.ZKwSipa4F7 + return 0 + log 'checking if chunks are deleted' + set +o xtrace [2025-05-29T13:56:25+0000] checking if chunks are deleted ++ kubectl_bin exec some-name-rs0-0 -c backup-agent -- pbm status -o json ++ jq '.backups.pitrChunks.pitrChunks[0].range.start' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DHImK1TB5R +++ mktemp ++ local LAST_ERR=/tmp/tmp.czNDlQXTcX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec some-name-rs0-0 -c backup-agent -- pbm status -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DHImK1TB5R ++ cat /tmp/tmp.czNDlQXTcX ++ rm /tmp/tmp.DHImK1TB5R /tmp/tmp.czNDlQXTcX ++ return 0 + chunk_start=1748526821 + [[ 1748526733 == 1748526821 ]] + log 'chunks are deleted, OK.' + set +o xtrace [2025-05-29T13:56:27+0000] chunks are deleted, OK. + destroy multi-storage-23296 + local namespace=multi-storage-23296 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_backups + desc 'Delete psmdb-backup' + set +o xtrace ----------------------------------------------------------------------------------- Delete psmdb-backup ----------------------------------------------------------------------------------- ++ wc -l ++ kubectl_bin get psmdb-backup --no-headers +++ mktemp ++ local LAST_OUT=/tmp/tmp.vlTYOmdnzL +++ mktemp ++ local LAST_ERR=/tmp/tmp.Imiq3OIh0G ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup --no-headers ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vlTYOmdnzL ++ cat /tmp/tmp.Imiq3OIh0G ++ rm /tmp/tmp.vlTYOmdnzL /tmp/tmp.Imiq3OIh0G ++ return 0 + '[' 2 '!=' 0 ']' + kubectl_bin get psmdb-backup ++ mktemp + local LAST_OUT=/tmp/tmp.mhmzAZxLXY ++ mktemp + local LAST_ERR=/tmp/tmp.qJpFGfQgCM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get psmdb-backup + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mhmzAZxLXY NAME CLUSTER STORAGE DESTINATION TYPE SIZE STATUS COMPLETED AGE backup-minio-1 some-name minio-1 s3://operator-testing/2025-05-29T13:40:53Z logical 48.10KB ready 15m 15m backup-minio-2 some-name minio-2 s3://operator-testing-2/2025-05-29T13:41:06Z logical 62.20KB ready 15m 15m + cat /tmp/tmp.qJpFGfQgCM + rm /tmp/tmp.mhmzAZxLXY /tmp/tmp.qJpFGfQgCM + return 0 + kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.fBvZhIEPpK ++ mktemp + local LAST_ERR=/tmp/tmp.sepxtEO51u + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete 
psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fBvZhIEPpK perconaservermongodbbackup.psmdb.percona.com "backup-minio-1" deleted perconaservermongodbbackup.psmdb.percona.com "backup-minio-2" deleted + cat /tmp/tmp.sepxtEO51u + rm /tmp/tmp.fBvZhIEPpK /tmp/tmp.sepxtEO51u + return 0 + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.PybWYkuge0 ++ mktemp + local LAST_ERR=/tmp/tmp.c5s0ZfJWU8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PybWYkuge0 customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.c5s0ZfJWU8 + rm /tmp/tmp.PybWYkuge0 /tmp/tmp.c5s0ZfJWU8 + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.iJ98jBx1u3 ++ mktemp + local LAST_ERR=/tmp/tmp.h9n942izfR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.iJ98jBx1u3 + cat /tmp/tmp.h9n942izfR + rm /tmp/tmp.iJ98jBx1u3 /tmp/tmp.h9n942izfR + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com
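
# The delete_crd sequence above follows the same recipe for each of the three CRDs: delete the
# CRD without waiting, strip finalizers from any custom resources that survived (otherwise CRD
# deletion can hang indefinitely), and only then wait for the CRD to vanish. The "doesn't have a
# resource type" errors in this run are the benign path: the resources are already gone. A sketch
# of the per-CRD step; `|| :` mirrors the trace's tolerance of those errors.
crd=perconaservermongodbbackups.psmdb.percona.com
kubectl get "$crd" --all-namespaces -o wide | grep -v NAMESPACE \
    | xargs -L 1 sh -xc "kubectl patch $crd -n \$0 \$1 --type=merge -p '{\"metadata\":{\"finalizers\":[]}}'" || :
kubectl wait --for=delete crd "$crd"
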
++ mktemp + local LAST_OUT=/tmp/tmp.Vm65TcLVOY ++ mktemp + local LAST_ERR=/tmp/tmp.w84X5Q7iBP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Vm65TcLVOY + cat /tmp/tmp.w84X5Q7iBP + rm /tmp/tmp.Vm65TcLVOY /tmp/tmp.w84X5Q7iBP + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.PUffD3GVmz ++ mktemp + local LAST_ERR=/tmp/tmp.UhLuz0P526 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PUffD3GVmz + cat /tmp/tmp.UhLuz0P526 + rm /tmp/tmp.PUffD3GVmz /tmp/tmp.UhLuz0P526 + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.6pMc2QK0by ++ mktemp + local LAST_ERR=/tmp/tmp.RctOPbzOqg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1926/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6pMc2QK0by clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.RctOPbzOqg + rm /tmp/tmp.6pMc2QK0by /tmp/tmp.RctOPbzOqg + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.V5mBmFeUCB ++ mktemp + local LAST_ERR=/tmp/tmp.FVJiVsDgox + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.V5mBmFeUCB + cat /tmp/tmp.FVJiVsDgox Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.V5mBmFeUCB + cat /tmp/tmp.FVJiVsDgox Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server 
(NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): 
error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io 
"cert-manager-webhook" not found + sleep 4 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.V5mBmFeUCB + cat /tmp/tmp.FVJiVsDgox Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error 
from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": 
deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 8 + cat /tmp/tmp.V5mBmFeUCB + cat /tmp/tmp.FVJiVsDgox Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": 
clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server 
(NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + rm /tmp/tmp.V5mBmFeUCB /tmp/tmp.FVJiVsDgox + return 1 + true + '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace multi-storage-23296 + rm -rf /tmp/tmp.I7LLtUMviw ++ mktemp + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp + log 'test passed' + local LAST_OUT=/tmp/tmp.7Bkylw6faY + set +o xtrace ++ mktemp + local LAST_OUT=/tmp/tmp.NiLj9TQmXW ++ mktemp + local LAST_ERR=/tmp/tmp.xsxVRpKS28 + local exit_status=0 + local timeout=4 [2025-05-29T13:57:19+0000] test passed ++ seq 0 2 + local LAST_ERR=/tmp/tmp.IJHjfzD43z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace multi-storage-23296 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator