Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/logs/demand-backup.log WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 + create_infra demand-backup-11568 + local ns=demand-backup-11568 + [[ 1 == 1 ]] + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.0AuBNuevBy ++ mktemp + local LAST_ERR=/tmp/tmp.VSXVrgIVaU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0AuBNuevBy customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.VSXVrgIVaU + rm /tmp/tmp.0AuBNuevBy /tmp/tmp.VSXVrgIVaU + return 0 ++ grep -v '\-\-\-' ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/deploy/crd.yaml + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' No resources found + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: resource(s) were provided, but no name was specified + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.c3mjEMhKPe ++ mktemp + local LAST_ERR=/tmp/tmp.JuA1jxJbiY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.c3mjEMhKPe + cat /tmp/tmp.JuA1jxJbiY + rm /tmp/tmp.c3mjEMhKPe /tmp/tmp.JuA1jxJbiY + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd 
perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.SHoLm5afwM ++ mktemp + local LAST_ERR=/tmp/tmp.D1F9gbK1rq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SHoLm5afwM + cat /tmp/tmp.D1F9gbK1rq + rm /tmp/tmp.SHoLm5afwM /tmp/tmp.D1F9gbK1rq + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.kJtMdIl7lE ++ mktemp + local LAST_ERR=/tmp/tmp.LsGOxUOrI1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kJtMdIl7lE + cat /tmp/tmp.LsGOxUOrI1 + rm /tmp/tmp.kJtMdIl7lE /tmp/tmp.LsGOxUOrI1 + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.Dfw8McTK04 ++ mktemp + local LAST_ERR=/tmp/tmp.nv3Yzrg99Q + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Dfw8McTK04 clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.nv3Yzrg99Q + rm /tmp/tmp.Dfw8McTK04 /tmp/tmp.nv3Yzrg99Q + return 0 + check_crd_for_deletion PR-1935-106cb61c + local git_tag=PR-1935-106cb61c ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1935-106cb61c/deploy/crd.yaml ++ yq eval .metadata.name ++ /usr/bin/sed s/---//g ++ /usr/bin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in '$(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '\''.metadata.name'\'' | $sed '\''s/---//g'\'' | $sed '\'':a;N;$!ba;s/\n/ /g'\'')' ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EB0t8CEY8B +++ mktemp ++ local LAST_ERR=/tmp/tmp.hp4zdbSl27 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.EB0t8CEY8B ++ cat /tmp/tmp.hp4zdbSl27 Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ 
exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.EB0t8CEY8B ++ cat /tmp/tmp.hp4zdbSl27 Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.EB0t8CEY8B ++ cat /tmp/tmp.hp4zdbSl27 Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.EB0t8CEY8B ++ cat /tmp/tmp.hp4zdbSl27 Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.EB0t8CEY8B /tmp/tmp.hp4zdbSl27 ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get MutatingWebhookConfiguration + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl api-resources ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found + kubectl_bin get ns ++ mktemp + local LAST_OUT=/tmp/tmp.b0fpnXLdTV ++ mktemp + local LAST_ERR=/tmp/tmp.odtwgOPAy9 + xargs kubectl delete ns + local exit_status=0 + local timeout=4 ++ mktemp ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + local 
LAST_OUT=/tmp/tmp.09OkesbkLx ++ mktemp + local LAST_ERR=/tmp/tmp.AznNXLQVj4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.09OkesbkLx + cat /tmp/tmp.AznNXLQVj4 + rm /tmp/tmp.09OkesbkLx /tmp/tmp.AznNXLQVj4 + return 0 namespace "demand-backup-11423" deleted namespace "gke-managed-cim" deleted namespace "gke-managed-system" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.b0fpnXLdTV namespace "psmdb-operator" deleted + cat /tmp/tmp.odtwgOPAy9 + rm /tmp/tmp.b0fpnXLdTV /tmp/tmp.odtwgOPAy9 + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.ll6ChL2iy9 ++ mktemp + local LAST_ERR=/tmp/tmp.HfP7L9wvPL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ll6ChL2iy9 + cat /tmp/tmp.HfP7L9wvPL + rm /tmp/tmp.ll6ChL2iy9 /tmp/tmp.HfP7L9wvPL + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.yegeDhbzGI ++ mktemp + local LAST_ERR=/tmp/tmp.6CFnh6YSPb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yegeDhbzGI namespace/psmdb-operator created + cat /tmp/tmp.6CFnh6YSPb + rm /tmp/tmp.yegeDhbzGI /tmp/tmp.6CFnh6YSPb + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.Vbfz2N2jFv +++ mktemp ++ local LAST_ERR=/tmp/tmp.ROVy17ZBKU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Vbfz2N2jFv ++ cat /tmp/tmp.ROVy17ZBKU ++ rm /tmp/tmp.Vbfz2N2jFv /tmp/tmp.ROVy17ZBKU ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1935-106cb61c-4-cluster10 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.GhZtFcky84 ++ mktemp + local LAST_ERR=/tmp/tmp.6LsF3cTtLw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1935-106cb61c-4-cluster10 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GhZtFcky84 Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1935-106cb61c-4-cluster10" modified. 
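Every kubectl invocation in this trace goes through a kubectl_bin wrapper, which is why the same mktemp / LAST_OUT / LAST_ERR / seq 0 2 boilerplate repeats around each command. Reconstructed from the trace, the helper behaves roughly like the sketch below; exact details of the real function may differ:

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 timeout=4
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # capture one attempt
        exit_status=$?
        set -e
        if [ "$exit_status" -eq 0 ]; then
            break
        fi
        sleep $((timeout * i))   # the trace shows sleep 0, 4, 8 between attempts
    done
    cat "$LAST_OUT"   # replay the captured output into the log
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

This retry loop is why the kubectl get crd/null probe above fails three times, sleeping 0, 4, and 8 seconds between attempts, before the wrapper finally returns 1.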
+ cat /tmp/tmp.6LsF3cTtLw + rm /tmp/tmp.GhZtFcky84 /tmp/tmp.6LsF3cTtLw + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.e5fVvim3vC ++ mktemp + local LAST_ERR=/tmp/tmp.qeq81cXa1f + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.e5fVvim3vC customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.qeq81cXa1f + rm /tmp/tmp.e5fVvim3vC /tmp/tmp.qeq81cXa1f + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + sed -e 's^namespace: .*^namespace: psmdb-operator^' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/deploy/cw-rbac.yaml + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.EC9SvweSEL ++ mktemp + local LAST_ERR=/tmp/tmp.VYP4JsKBdM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EC9SvweSEL clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.VYP4JsKBdM + rm /tmp/tmp.EC9SvweSEL /tmp/tmp.VYP4JsKBdM + return 0 + kubectl_bin apply -f - + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1935-106cb61c") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/deploy/cw-operator.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.ZMnc54W01p ++ mktemp + local LAST_ERR=/tmp/tmp.RWOlUjGTlV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ZMnc54W01p deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.RWOlUjGTlV + rm /tmp/tmp.ZMnc54W01p /tmp/tmp.RWOlUjGTlV + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.oFbIAaY8cT +++ mktemp ++ local LAST_ERR=/tmp/tmp.E1HBih85pn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oFbIAaY8cT ++ cat /tmp/tmp.E1HBih85pn ++ rm /tmp/tmp.oFbIAaY8cT /tmp/tmp.E1HBih85pn ++ return 0 + wait_pod percona-server-mongodb-operator-55966bfd7b-f2rzs + local pod=percona-server-mongodb-operator-55966bfd7b-f2rzs + set +o xtrace waiting for pod/percona-server-mongodb-operator-55966bfd7b-f2rzs to be ready.OK + create_namespace demand-backup-11568 + local namespace=demand-backup-11568 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ sed s/NAMESPACE// ++ helm list --all-namespaces --filter chaos-mesh ++ awk '-F ' '{print $2}' ++ tail -n1 + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get MutatingWebhookConfiguration + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl api-resources ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + awk '{print$1}' + '[' -n '' ']' + xargs kubectl delete ns + desc 
'cleaned up old namespaces demand-backup-11568' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces demand-backup-11568 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace demand-backup-11568 --ignore-not-found + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.OZ81SAbohU + local LAST_OUT=/tmp/tmp.xYHXuXqDHa ++ mktemp + local LAST_ERR=/tmp/tmp.UAL97mj1ur + local exit_status=0 + local timeout=4 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.mKILeXzE8p + local exit_status=0 + local timeout=4 + for i in '$(seq 0 2)' + set +e + kubectl get ns ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace demand-backup-11568 --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xYHXuXqDHa + cat /tmp/tmp.mKILeXzE8p + rm /tmp/tmp.xYHXuXqDHa /tmp/tmp.mKILeXzE8p + return 0 + kubectl_bin wait --for=delete namespace demand-backup-11568 ++ mktemp + local LAST_OUT=/tmp/tmp.JJ8DTXxFSu ++ mktemp + local LAST_ERR=/tmp/tmp.kYAiUk1stx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace demand-backup-11568 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.OZ81SAbohU + cat /tmp/tmp.UAL97mj1ur + rm /tmp/tmp.OZ81SAbohU /tmp/tmp.UAL97mj1ur + return 0 namespace "gke-managed-cim" deleted namespace "gke-managed-system" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JJ8DTXxFSu + cat /tmp/tmp.kYAiUk1stx + rm /tmp/tmp.JJ8DTXxFSu /tmp/tmp.kYAiUk1stx + return 0 + desc 'create namespace demand-backup-11568' + set +o xtrace ----------------------------------------------------------------------------------- create namespace demand-backup-11568 ----------------------------------------------------------------------------------- + kubectl_bin create namespace demand-backup-11568 ++ mktemp + local LAST_OUT=/tmp/tmp.3Wz0Xwilif ++ mktemp + local LAST_ERR=/tmp/tmp.bvm3vyaBf7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace demand-backup-11568 namespace "gmp-public" deleted namespace "gmp-system" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3Wz0Xwilif namespace/demand-backup-11568 created + cat /tmp/tmp.bvm3vyaBf7 + rm /tmp/tmp.3Wz0Xwilif /tmp/tmp.bvm3vyaBf7 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y2XBXSl6cT +++ mktemp ++ local LAST_ERR=/tmp/tmp.UFbGt3I4Hg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Y2XBXSl6cT ++ cat /tmp/tmp.UFbGt3I4Hg ++ rm /tmp/tmp.Y2XBXSl6cT /tmp/tmp.UFbGt3I4Hg ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1935-106cb61c-4-cluster10 --namespace=demand-backup-11568 ++ mktemp + local LAST_OUT=/tmp/tmp.yK1PMnqZNA ++ mktemp + local LAST_ERR=/tmp/tmp.b7oxleAf32 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1935-106cb61c-4-cluster10 --namespace=demand-backup-11568 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yK1PMnqZNA Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1935-106cb61c-4-cluster10" modified. 
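This is the second time the namespace setup runs (first for psmdb-operator, now for the test namespace), so the pattern is worth spelling out. Condensed from the trace, create_namespace amounts to the sketch below; the function boundaries are inferred, and in the real script the leftover-namespace sweep and the targeted delete appear to run concurrently (their temp-file traces interleave above), though the sketch shows them sequentially for clarity:

create_namespace() {
    local namespace=$1
    # sweep namespaces left over from earlier runs, keeping system ones
    kubectl_bin get ns \
        | egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' \
        | awk '{print $1}' | xargs kubectl delete ns
    kubectl_bin delete namespace "$namespace" --ignore-not-found
    kubectl_bin wait --for=delete namespace "$namespace"
    kubectl_bin create namespace "$namespace"
    kubectl_bin config set-context "$(kubectl config current-context)" --namespace="$namespace"
}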
+ cat /tmp/tmp.b7oxleAf32 + rm /tmp/tmp.yK1PMnqZNA /tmp/tmp.b7oxleAf32 + return 0 + deploy_minio + desc 'install Minio' + set +o xtrace ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- + helm uninstall minio-service Error: uninstall: Release not loaded: minio-service: release: not found + : + helm repo remove minio "minio" has been removed from your repositories + helm repo add minio https://charts.min.io/ "minio" has been added to your repositories + retry 10 60 helm install minio-service --version 5.0.14 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio + local max=10 + local delay=60 + shift 2 + local n=1 + helm install minio-service --version 5.0.14 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio NAME: minio-service LAST DEPLOYED: Tue May 20 08:28:00 2025 NAMESPACE: demand-backup-11568 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.demand-backup-11568.svc.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace demand-backup-11568 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. kubectl port-forward $POD_NAME 9000 --namespace demand-backup-11568 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace demand-backup-11568 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace demand-backup-11568 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. 
mc ls minio-service-local ++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KDd7mDXAu9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.iU89yKE4J0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.KDd7mDXAu9 ++ cat /tmp/tmp.iU89yKE4J0 ++ rm /tmp/tmp.KDd7mDXAu9 /tmp/tmp.iU89yKE4J0 ++ return 0 + MINIO_POD=minio-service-8967c7f7f-sds8k + wait_pod minio-service-8967c7f7f-sds8k + local pod=minio-service-8967c7f7f-sds8k + set +o xtrace waiting for pod/minio-service-8967c7f7f-sds8k to be ready.OK + '[' -n psmdb-operator ']' + kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.demand-backup-11568.svc.cluster.local --tcp=9000 ++ mktemp + local LAST_OUT=/tmp/tmp.LA3ivewLR4 ++ mktemp + local LAST_ERR=/tmp/tmp.b4wMkGA570 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create svc -n psmdb-operator externalname minio-service --external-name=minio-service.demand-backup-11568.svc.cluster.local --tcp=9000 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LA3ivewLR4 service/minio-service created + cat /tmp/tmp.b4wMkGA570 + rm /tmp/tmp.LA3ivewLR4 /tmp/tmp.b4wMkGA570 + return 0 + create_minio_bucket operator-testing + local bucket=operator-testing + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' ++ mktemp + local LAST_OUT=/tmp/tmp.hMQdquAEh1 ++ mktemp + local LAST_ERR=/tmp/tmp.QVK1TL2ie6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hMQdquAEh1 make_bucket: operator-testing pod "aws-cli" deleted + cat /tmp/tmp.QVK1TL2ie6 If you don't see a command prompt, try pressing enter. 
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-11568 + rm /tmp/tmp.hMQdquAEh1 /tmp/tmp.QVK1TL2ie6 + return 0 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + cluster=some-name-rs0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.9QwfodJaSS ++ mktemp + local LAST_ERR=/tmp/tmp.RIurxCWhb6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9QwfodJaSS secret/some-users created deployment.apps/psmdb-client created + cat /tmp/tmp.RIurxCWhb6 + rm /tmp/tmp.9QwfodJaSS /tmp/tmp.RIurxCWhb6 + return 0 + apply_s3_storage_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.UnaNMMZEYn ++ mktemp + local LAST_ERR=/tmp/tmp.OALR4gcDbj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UnaNMMZEYn secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.OALR4gcDbj + rm /tmp/tmp.UnaNMMZEYn /tmp/tmp.OALR4gcDbj + return 0 + desc 'create first PSMDB cluster some-name-rs0' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster some-name-rs0 ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/some-name-rs0.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/some-name-rs0.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/some-name-rs0.yml + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod8.0"' ++ mktemp + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' + yq eval '.spec.upgradeOptions.apply="Never"' + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1935-106cb61c"' + local LAST_OUT=/tmp/tmp.wjtbrNhGzU + yq eval 
'(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' ++ mktemp + local LAST_ERR=/tmp/tmp.kNddOid1qu + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wjtbrNhGzU perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.kNddOid1qu + rm /tmp/tmp.wjtbrNhGzU /tmp/tmp.kNddOid1qu + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.......OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.....OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rqEs56UuAT +++ mktemp ++ local LAST_ERR=/tmp/tmp.daGbJ0EEvO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rqEs56UuAT ++ cat /tmp/tmp.daGbJ0EEvO ++ rm /tmp/tmp.rqEs56UuAT /tmp/tmp.daGbJ0EEvO ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready....OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bclgaHfG1u +++ mktemp ++ local LAST_ERR=/tmp/tmp.zA6b2cKqeb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bclgaHfG1u ++ cat /tmp/tmp.zA6b2cKqeb ++ rm /tmp/tmp.bclgaHfG1u /tmp/tmp.zA6b2cKqeb ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness... 
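The dotted "waiting for pod/... to be ready" lines come from a wait_pod helper whose body is hidden by set +o xtrace; it evidently polls the pod and prints a dot per attempt. A minimal equivalent, assuming a readiness-condition poll (the real helper may check something else):

wait_pod() {
    local pod=$1
    echo -n "waiting for pod/$pod to be ready"
    # poll the Ready condition until it reports True
    until [ "$(kubectl get pod "$pod" -o jsonpath='{.status.conditions[?(@.type=="Ready")].status}')" = "True" ]; do
        echo -n .
        sleep 1
    done
    echo .OK
}

The compare_kubectl step that follows fetches the live StatefulSet, strips every volatile or environment-specific field with a long yq filter, and diffs the result against a checked-in expectation. Heavily condensed (the full filter appears verbatim in the trace below):

kubectl get -o yaml statefulset/some-name-rs0 |
    yq eval '
        del(.metadata.managedFields) | del(.status) |
        del(.. | select(has("image")).image) |
        (.. | select(tag == "!!str")) |= sub("demand-backup-11568", "NAME_SPACE")
    ' - > "$new_result"
diff -u "$expected_result" "$new_result"   # empty diff means the spec matches

An empty diff, as seen here, means the operator created the StatefulSet with the expected configuration.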
+ desc 'check if service and statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/some-name-rs0 + local resource=statefulset/some-name-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/statefulset_some-name-rs0.yml + local new_result=/tmp/tmp.hzgVPPt15y/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/statefulset_some-name-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 ++ mktemp + local LAST_OUT=/tmp/tmp.H1nABXI6RD + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("demand-backup-11568", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_ERR=/tmp/tmp.uftySBddA2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.H1nABXI6RD + cat /tmp/tmp.uftySBddA2 + rm /tmp/tmp.H1nABXI6RD /tmp/tmp.uftySBddA2 + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.hzgVPPt15y/statefulset_some-name-rs0.yml + version_gt 1.22 ++ bc -l ++ echo '1.30 >= 1.22' + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.hzgVPPt15y/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.hzgVPPt15y/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/statefulset_some-name-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/statefulset_some-name-rs0.yml /tmp/tmp.hzgVPPt15y/statefulset_some-name-rs0.yml + desc 'create user' + set +o xtrace ----------------------------------------------------------------------------------- create user ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.glqAZIiu5s +++ mktemp ++ local LAST_ERR=/tmp/tmp.GelHPEN7Jl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.glqAZIiu5s ++ cat /tmp/tmp.GelHPEN7Jl ++ rm /tmp/tmp.glqAZIiu5s /tmp/tmp.GelHPEN7Jl ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.jwdsYav495 ++ mktemp + local LAST_ERR=/tmp/tmp.yJgGgoSChH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jwdsYav495 Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("aa45368e-3b96-43c0-be0e-5517756bd959") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.yJgGgoSChH + rm /tmp/tmp.jwdsYav495 /tmp/tmp.yJgGgoSChH + return 0 + sleep 2 + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XjswbuAyVv +++ mktemp ++ local LAST_ERR=/tmp/tmp.AHHj5HMGXO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XjswbuAyVv ++ cat /tmp/tmp.AHHj5HMGXO ++ rm /tmp/tmp.XjswbuAyVv /tmp/tmp.AHHj5HMGXO ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.7WBtTwbJjR ++ mktemp + local LAST_ERR=/tmp/tmp.EBRFsc9Eq0 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7WBtTwbJjR Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("17bb7cf6-4442-4797-9427-0bbacd58ea33") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.EBRFsc9Eq0 + rm /tmp/tmp.7WBtTwbJjR /tmp/tmp.EBRFsc9Eq0 + return 0 + run_mongo 'use myApp\n db.test2.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.test2.insert({ x: 100501 })' + local 
uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9o1zfVBxkH +++ mktemp ++ local LAST_ERR=/tmp/tmp.cLpRJHOr6e ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9o1zfVBxkH ++ cat /tmp/tmp.cLpRJHOr6e ++ rm /tmp/tmp.9o1zfVBxkH /tmp/tmp.cLpRJHOr6e ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.LPtl5PsrEO ++ mktemp + local LAST_ERR=/tmp/tmp.strvbW5Tif + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LPtl5PsrEO Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("b325fa3c-f8b1-4800-86bc-d580a005d852") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.strvbW5Tif + rm /tmp/tmp.LPtl5PsrEO /tmp/tmp.strvbW5Tif + return 0 + run_mongo 'use myApp\n db.test3.insert({ x: 100502 })' myApp:myPass@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.test3.insert({ x: 100502 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JXk1n1afTp +++ mktemp ++ local LAST_ERR=/tmp/tmp.iGF08qLzoV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JXk1n1afTp ++ cat /tmp/tmp.iGF08qLzoV ++ rm /tmp/tmp.JXk1n1afTp /tmp/tmp.iGF08qLzoV ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test3.insert({ x: 100502 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.bD6RGPYOWx ++ mktemp + local 
LAST_ERR=/tmp/tmp.jQJwsizzz3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test3.insert({ x: 100502 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bD6RGPYOWx Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("4796df6d-97e9-4906-85a5-2a39c5db7c93") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.jQJwsizzz3 + rm /tmp/tmp.bD6RGPYOWx /tmp/tmp.jQJwsizzz3 + return 0 + custom_user_name=test1user + custom_role_name=test1role + run_mongo 'use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})' userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dIZqRfb2mO +++ mktemp ++ local LAST_ERR=/tmp/tmp.K5Z8yGaukG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dIZqRfb2mO ++ cat /tmp/tmp.K5Z8yGaukG ++ rm /tmp/tmp.dIZqRfb2mO /tmp/tmp.K5Z8yGaukG ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.ONvG8dUfx5 ++ mktemp + local LAST_ERR=/tmp/tmp.KVokpJXIZT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ONvG8dUfx5 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : 
UUID("c5aa1eac-a6f5-49ce-8c92-1df72e1f0d6b") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp Successfully added user: { "user" : "test1user", "roles" : [ ] } bye + cat /tmp/tmp.KVokpJXIZT + rm /tmp/tmp.ONvG8dUfx5 /tmp/tmp.KVokpJXIZT + return 0 + run_mongo 'use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})' userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KLI0PAjKv9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.nlm1LgrLhk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.KLI0PAjKv9 ++ cat /tmp/tmp.nlm1LgrLhk ++ rm /tmp/tmp.KLI0PAjKv9 /tmp/tmp.nlm1LgrLhk ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.bvH2LvGFCv ++ mktemp + local LAST_ERR=/tmp/tmp.PuM9Azdlg0 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bvH2LvGFCv Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("54c49e65-b033-4b93-ba7d-f3f6ebf948c2") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp { "role" : "test1role", "privileges" : [ ], "roles" : [ ] } bye + cat /tmp/tmp.PuM9Azdlg0 + rm /tmp/tmp.bvH2LvGFCv /tmp/tmp.PuM9Azdlg0 + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:30:36+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bG2XIqa8lx +++ mktemp ++ local LAST_ERR=/tmp/tmp.VbqmOGYMQS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bG2XIqa8lx ++ cat /tmp/tmp.VbqmOGYMQS ++ rm /tmp/tmp.bG2XIqa8lx /tmp/tmp.VbqmOGYMQS ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.CZkXEuL5xa ++ mktemp + local LAST_ERR=/tmp/tmp.QK9ArOCtwZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CZkXEuL5xa + cat /tmp/tmp.QK9ArOCtwZ + rm /tmp/tmp.CZkXEuL5xa /tmp/tmp.QK9ArOCtwZ + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:30:39+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3BIwVaK1Rw + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_ERR=/tmp/tmp.RS0mG4e2Jb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3BIwVaK1Rw ++ cat /tmp/tmp.RS0mG4e2Jb ++ rm /tmp/tmp.3BIwVaK1Rw /tmp/tmp.RS0mG4e2Jb ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.85Zn2Q2Cka ++ mktemp + local LAST_ERR=/tmp/tmp.TBEzGOVixv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.85Zn2Q2Cka + cat /tmp/tmp.TBEzGOVixv + rm /tmp/tmp.85Zn2Q2Cka /tmp/tmp.TBEzGOVixv + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:30:42+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oakohJkUVa +++ mktemp ++ local LAST_ERR=/tmp/tmp.rlGYghMbNW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oakohJkUVa ++ cat /tmp/tmp.rlGYghMbNW ++ rm /tmp/tmp.oakohJkUVa /tmp/tmp.rlGYghMbNW ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.oFv0ISqXuL ++ mktemp + local LAST_ERR=/tmp/tmp.ooVILc6NUi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oFv0ISqXuL + cat /tmp/tmp.ooVILc6NUi + rm /tmp/tmp.oFv0ISqXuL /tmp/tmp.ooVILc6NUi + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + wait_backup_agent some-name-rs0-0 + local agent_pod=some-name-rs0-0 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-0...2025-05-20T08:30:08.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-1 + local agent_pod=some-name-rs0-1 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-1...2025-05-20T08:30:11.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-2 + local agent_pod=some-name-rs0-2 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-2...2025-05-20T08:30:12.000+0000 I listening for the commands + backup_name_minio=backup-minio + '[' -z '' ']' + backup_name_aws=backup-aws-s3 + backup_name_gcp=backup-gcp-cs + backup_name_azure=backup-azure-blob + desc 'run backups' + set +o xtrace ----------------------------------------------------------------------------------- run backups ----------------------------------------------------------------------------------- + '[' -z '' ']' + run_backup aws-s3 + local storage=aws-s3 + local backup_name=backup-aws-s3 + local type=logical + desc 'run backup backup-aws-s3' + set +o xtrace 
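
The run_backup calls traced below (aws-s3, gcp-cs, azure-blob, minio) all apply the same kind of object: yq rewrites three fields of a PerconaServerMongoDBBackup template and pipes the result to kubectl apply. A minimal sketch of what reaches the API server, assuming apiVersion psmdb.percona.com/v1 and a spec.clusterName field for the some-name cluster (neither appears in the trace; only the three yq-set fields do):

kubectl apply -f - <<'EOF'
apiVersion: psmdb.percona.com/v1    # assumed; the log only shows the CRD group psmdb.percona.com
kind: PerconaServerMongoDBBackup
metadata:
  name: backup-aws-s3               # set by yq: .metadata.name
spec:
  clusterName: some-name            # assumed; the cluster exercised throughout this test
  storageName: aws-s3               # set by yq: .spec.storageName
  type: logical                     # set by yq: .spec.type
EOF

Each backup is later restored by run_restore, which does the analogous templating with sed over restore.yml, filling in the name: and backupName: keys. A sketch of the resulting PerconaServerMongoDBRestore under the same assumptions (restore.yml itself is not shown in the log):

kubectl apply -f - <<'EOF'
apiVersion: psmdb.percona.com/v1    # assumed
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-aws-s3       # filled by sed: 's/name:/name: restore-backup-aws-s3/'
spec:
  clusterName: some-name            # assumed
  backupName: backup-aws-s3         # filled by sed: 's/backupName:/backupName: backup-aws-s3/'
EOF
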
----------------------------------------------------------------------------------- run backup backup-aws-s3 ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-aws-s3" | .spec.storageName = "aws-s3" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/backup-aws-s3.yml ++ mktemp + local LAST_OUT=/tmp/tmp.rCj0Rj6q6X ++ mktemp + local LAST_ERR=/tmp/tmp.iiAQTO9JWl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rCj0Rj6q6X perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created + cat /tmp/tmp.iiAQTO9JWl + rm /tmp/tmp.rCj0Rj6q6X /tmp/tmp.iiAQTO9JWl + return 0 + run_backup gcp-cs + local storage=gcp-cs + local backup_name=backup-gcp-cs + local type=logical + desc 'run backup backup-gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-gcp-cs ----------------------------------------------------------------------------------- + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.bZdH8L3QUm ++ mktemp + local LAST_ERR=/tmp/tmp.JwLoMRCYPg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + yq eval '.metadata.name = "backup-gcp-cs" | .spec.storageName = "gcp-cs" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/backup-gcp-cs.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bZdH8L3QUm perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs created + cat /tmp/tmp.JwLoMRCYPg + rm /tmp/tmp.bZdH8L3QUm /tmp/tmp.JwLoMRCYPg + return 0 + run_backup azure-blob + local storage=azure-blob + local backup_name=backup-azure-blob + local type=logical + desc 'run backup backup-azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-azure-blob ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-azure-blob" | .spec.storageName = "azure-blob" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/backup-azure-blob.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.DnMtpthNgc ++ mktemp + local LAST_ERR=/tmp/tmp.P3hTp4zEp2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DnMtpthNgc perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created + cat /tmp/tmp.P3hTp4zEp2 + rm /tmp/tmp.DnMtpthNgc /tmp/tmp.P3hTp4zEp2 + return 0 + run_backup minio + local storage=minio + local backup_name=backup-minio + local type=logical + desc 'run backup backup-minio' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio ----------------------------------------------------------------------------------- + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.1aiig7zE1C ++ mktemp + local LAST_ERR=/tmp/tmp.KddK79rVzE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + yq eval '.metadata.name = "backup-minio" | .spec.storageName = "minio" | .spec.type = "logical"' 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/backup-minio.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1aiig7zE1C perconaservermongodbbackup.psmdb.percona.com/backup-minio created + cat /tmp/tmp.KddK79rVzE + rm /tmp/tmp.1aiig7zE1C /tmp/tmp.KddK79rVzE + return 0 + '[' -z '' ']' + wait_backup backup-aws-s3 + local backup_name=backup-aws-s3 + local target_state=ready + set +o xtrace waiting for backup-aws-s3 to reach ready state. + wait_backup backup-gcp-cs + local backup_name=backup-gcp-cs + local target_state=ready + set +o xtrace waiting for backup-gcp-cs to reach ready state........ + wait_backup backup-azure-blob + local backup_name=backup-azure-blob + local target_state=ready + set +o xtrace waiting for backup-azure-blob to reach ready state. + wait_backup backup-minio + local backup_name=backup-minio + local target_state=ready + set +o xtrace waiting for backup-minio to reach ready state... + sleep 5 + '[' -z '' ']' + desc 'check backup and restore -- aws-s3' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- aws-s3 ----------------------------------------------------------------------------------- ++ get_backup_dest backup-aws-s3 ++ local backup_name=backup-aws-s3 ++ kubectl_bin get psmdb-backup backup-aws-s3 -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ sed 's|azure://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VYOa36Fham +++ mktemp ++ local LAST_ERR=/tmp/tmp.ogrQkqxMCT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-aws-s3 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VYOa36Fham ++ cat /tmp/tmp.ogrQkqxMCT ++ rm /tmp/tmp.VYOa36Fham /tmp/tmp.ogrQkqxMCT ++ return 0 + backup_dest_aws=operator-testing/psmdb-demand-backup/2025-05-20T08:30:52Z + curl -s https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-20T08:30:52Z/rs0/myApp.test.gz + gunzip + run_recovery_check backup-aws-s3 some-name-rs0 + local backup=backup-aws-s3 + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BEYiTiOioh +++ mktemp ++ local LAST_ERR=/tmp/tmp.lvD7S9WAIP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BEYiTiOioh ++ cat /tmp/tmp.lvD7S9WAIP ++ rm /tmp/tmp.BEYiTiOioh /tmp/tmp.lvD7S9WAIP ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.SYE1VaFiub ++ mktemp + local LAST_ERR=/tmp/tmp.oa8lEmQ4Io + local 
exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SYE1VaFiub Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("6233825b-b16c-46cd-a88d-4266c7fa96b3") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.oa8lEmQ4Io + rm /tmp/tmp.SYE1VaFiub /tmp/tmp.oa8lEmQ4Io + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-11568 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:31:46+0000] running db.test.find() in myApp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-11568 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JmbS4NZbjv +++ mktemp ++ local LAST_ERR=/tmp/tmp.T8QQRiYRJH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JmbS4NZbjv ++ cat /tmp/tmp.T8QQRiYRJH ++ rm /tmp/tmp.JmbS4NZbjv /tmp/tmp.T8QQRiYRJH ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.CLaNqDQ11u ++ mktemp + local LAST_ERR=/tmp/tmp.z0jQOWfpkM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CLaNqDQ11u + cat 
/tmp/tmp.z0jQOWfpkM + rm /tmp/tmp.CLaNqDQ11u /tmp/tmp.z0jQOWfpkM + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.hzgVPPt15y/find-2nd + run_restore backup-aws-s3 + local backup_name=backup-aws-s3 + /usr/bin/sed -e 's/name:/name: restore-backup-aws-s3/' + kubectl_bin apply -f - + /usr/bin/sed -e 's/backupName:/backupName: backup-aws-s3/' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/restore.yml ++ mktemp + local LAST_OUT=/tmp/tmp.6ABMQ1vYL6 ++ mktemp + local LAST_ERR=/tmp/tmp.dUvfL5fhWM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6ABMQ1vYL6 perconaservermongodbrestore.psmdb.percona.com/restore-backup-aws-s3 created + cat /tmp/tmp.dUvfL5fhWM + rm /tmp/tmp.6ABMQ1vYL6 /tmp/tmp.dUvfL5fhWM + return 0 + wait_restore backup-aws-s3 some-name + local backup_name=backup-aws-s3 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace waiting psmdb-restore/restore-backup-aws-s3 to reach ready state.......OK + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7H8HULwnr2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tDmlwvVtDp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7H8HULwnr2 ++ cat /tmp/tmp.tDmlwvVtDp ++ rm /tmp/tmp.7H8HULwnr2 /tmp/tmp.tDmlwvVtDp ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:32:14+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.1JdvUNRfwu +++ mktemp ++ local LAST_ERR=/tmp/tmp.QvaRgz0BOt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1JdvUNRfwu ++ cat /tmp/tmp.QvaRgz0BOt ++ rm /tmp/tmp.1JdvUNRfwu /tmp/tmp.QvaRgz0BOt ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.VvPsVwg38I ++ mktemp + local LAST_ERR=/tmp/tmp.1ywjNSvcYK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VvPsVwg38I + cat /tmp/tmp.1ywjNSvcYK + rm /tmp/tmp.VvPsVwg38I /tmp/tmp.1ywjNSvcYK + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:32:16+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cBmtsaWPBK +++ mktemp ++ local LAST_ERR=/tmp/tmp.LZlOjF6ijS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cBmtsaWPBK ++ cat /tmp/tmp.LZlOjF6ijS ++ rm /tmp/tmp.cBmtsaWPBK /tmp/tmp.LZlOjF6ijS ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.HpVjEtIEfz ++ mktemp + local LAST_ERR=/tmp/tmp.5pClhREW1X + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.HpVjEtIEfz + cat /tmp/tmp.5pClhREW1X + rm /tmp/tmp.HpVjEtIEfz /tmp/tmp.5pClhREW1X + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:32:19+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FC4u59iNDb +++ mktemp ++ local LAST_ERR=/tmp/tmp.aF904Donx3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FC4u59iNDb ++ cat /tmp/tmp.aF904Donx3 ++ rm /tmp/tmp.FC4u59iNDb /tmp/tmp.aF904Donx3 ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.cLsjqzSRbf ++ mktemp + local LAST_ERR=/tmp/tmp.peWgCT59TE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.cLsjqzSRbf + cat /tmp/tmp.peWgCT59TE + rm /tmp/tmp.cLsjqzSRbf /tmp/tmp.peWgCT59TE + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + desc 'check backup and restore -- gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- gcp-cs ----------------------------------------------------------------------------------- ++ get_backup_dest backup-gcp-cs ++ local backup_name=backup-gcp-cs ++ kubectl_bin get psmdb-backup backup-gcp-cs -o 'jsonpath={.status.destination}' ++ sed 's|s3://||' ++ sed -e 's/.json$//' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bFt2YG8K2d ++ sed 's|azure://||' +++ mktemp ++ local LAST_ERR=/tmp/tmp.IlFj70YUDR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-gcp-cs -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bFt2YG8K2d ++ cat /tmp/tmp.IlFj70YUDR ++ rm /tmp/tmp.bFt2YG8K2d /tmp/tmp.IlFj70YUDR ++ return 0 + backup_dest_gcp=operator-testing/psmdb-demand-backup/2025-05-20T08:31:15Z + curl -s https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-20T08:31:15Z/rs0/myApp.test.gz + gunzip + run_recovery_check backup-gcp-cs some-name-rs0 + 
local backup=backup-gcp-cs + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eEDqP2pxP8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.G8GKsWfSCQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eEDqP2pxP8 ++ cat /tmp/tmp.G8GKsWfSCQ ++ rm /tmp/tmp.eEDqP2pxP8 /tmp/tmp.G8GKsWfSCQ ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.KJ5Prtore1 ++ mktemp + local LAST_ERR=/tmp/tmp.LAFxXtY8T6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KJ5Prtore1 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("220b4e1f-1476-4834-95b3-cdc550c83983") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.LAFxXtY8T6 + rm /tmp/tmp.KJ5Prtore1 /tmp/tmp.LAFxXtY8T6 + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-11568 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:32:28+0000] running db.test.find() in myApp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-11568 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8yVwS1gNAs +++ mktemp ++ local LAST_ERR=/tmp/tmp.hC8o5U2GMG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8yVwS1gNAs ++ cat /tmp/tmp.hC8o5U2GMG ++ rm /tmp/tmp.8yVwS1gNAs /tmp/tmp.hC8o5U2GMG ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.o4Dvl3Sb78 ++ mktemp + local LAST_ERR=/tmp/tmp.zKR4OTsCve + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.o4Dvl3Sb78 + cat /tmp/tmp.zKR4OTsCve + rm /tmp/tmp.o4Dvl3Sb78 /tmp/tmp.zKR4OTsCve + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.hzgVPPt15y/find-2nd + run_restore backup-gcp-cs + local backup_name=backup-gcp-cs + /usr/bin/sed -e 's/name:/name: restore-backup-gcp-cs/' + kubectl_bin apply -f - + /usr/bin/sed -e 's/backupName:/backupName: backup-gcp-cs/' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/restore.yml ++ mktemp + local LAST_OUT=/tmp/tmp.8w0x8FqVRB ++ mktemp + local LAST_ERR=/tmp/tmp.RRrfONvlpg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8w0x8FqVRB perconaservermongodbrestore.psmdb.percona.com/restore-backup-gcp-cs created + cat /tmp/tmp.RRrfONvlpg + rm /tmp/tmp.8w0x8FqVRB /tmp/tmp.RRrfONvlpg + return 0 + wait_restore backup-gcp-cs some-name + local backup_name=backup-gcp-cs + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace waiting psmdb-restore/restore-backup-gcp-cs to reach ready state......OK + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for 
cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LKvbQD99WY +++ mktemp ++ local LAST_ERR=/tmp/tmp.w5ocKz8JQ2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LKvbQD99WY ++ cat /tmp/tmp.w5ocKz8JQ2 ++ rm /tmp/tmp.LKvbQD99WY /tmp/tmp.w5ocKz8JQ2 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:32:57+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QbXoTArvZM +++ mktemp ++ local LAST_ERR=/tmp/tmp.OtnUQ6WsHT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QbXoTArvZM ++ cat /tmp/tmp.OtnUQ6WsHT ++ rm /tmp/tmp.QbXoTArvZM /tmp/tmp.OtnUQ6WsHT ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.4rKfKMhpmv ++ mktemp + local LAST_ERR=/tmp/tmp.pDcnNxP0La + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4rKfKMhpmv + cat /tmp/tmp.pDcnNxP0La + rm /tmp/tmp.4rKfKMhpmv /tmp/tmp.pDcnNxP0La + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find 
myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:33:00+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HIUzVAM0lZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.j34RnwCODJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HIUzVAM0lZ ++ cat /tmp/tmp.j34RnwCODJ ++ rm /tmp/tmp.HIUzVAM0lZ /tmp/tmp.j34RnwCODJ ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.osN9ByU4Ou ++ mktemp + local LAST_ERR=/tmp/tmp.7sFZFgfbnl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.osN9ByU4Ou + cat /tmp/tmp.7sFZFgfbnl + rm /tmp/tmp.osN9ByU4Ou /tmp/tmp.7sFZFgfbnl + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:33:02+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3bge7zPuQL +++ mktemp ++ local LAST_ERR=/tmp/tmp.ri7IgI2ScR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3bge7zPuQL ++ cat /tmp/tmp.ri7IgI2ScR ++ rm /tmp/tmp.3bge7zPuQL /tmp/tmp.ri7IgI2ScR ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.bslpCmStCm ++ mktemp + local LAST_ERR=/tmp/tmp.yUoZpxlBsC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bslpCmStCm + cat /tmp/tmp.yUoZpxlBsC + rm /tmp/tmp.bslpCmStCm /tmp/tmp.yUoZpxlBsC + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + desc 'check backup and restore -- azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- azure-blob ----------------------------------------------------------------------------------- ++ get_backup_dest backup-azure-blob ++ local backup_name=backup-azure-blob ++ kubectl_bin get psmdb-backup backup-azure-blob -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' +++ mktemp ++ sed 's|azure://||' ++ local LAST_OUT=/tmp/tmp.EwWFCLeER6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.lPbiS7daW9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-azure-blob -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EwWFCLeER6 ++ cat /tmp/tmp.lPbiS7daW9 ++ rm /tmp/tmp.EwWFCLeER6 /tmp/tmp.lPbiS7daW9 ++ return 0 + backup_dest_azure=https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-20T08:31:04Z + gunzip + curl -s 
https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-20T08:31:04Z/rs0/myApp.test.gz + run_recovery_check backup-azure-blob some-name-rs0 + local backup=backup-azure-blob + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dkHmGIKk9s +++ mktemp ++ local LAST_ERR=/tmp/tmp.42Q9Vbayvo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dkHmGIKk9s ++ cat /tmp/tmp.42Q9Vbayvo ++ rm /tmp/tmp.dkHmGIKk9s /tmp/tmp.42Q9Vbayvo ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.leNplTNvvf ++ mktemp + local LAST_ERR=/tmp/tmp.GlKr1r5mw6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.leNplTNvvf Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("ea07f735-046d-42ee-bcc3-220ac93508a6") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.GlKr1r5mw6 + rm /tmp/tmp.leNplTNvvf /tmp/tmp.GlKr1r5mw6 + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-11568 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:33:08+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-11568 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zvViubmjNc +++ mktemp ++ local LAST_ERR=/tmp/tmp.2c2HuKFLer ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zvViubmjNc ++ cat /tmp/tmp.2c2HuKFLer ++ rm /tmp/tmp.zvViubmjNc /tmp/tmp.2c2HuKFLer ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.uJ4VaXJiva ++ mktemp + local LAST_ERR=/tmp/tmp.kokPnZyxVg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.uJ4VaXJiva + cat /tmp/tmp.kokPnZyxVg + rm /tmp/tmp.uJ4VaXJiva /tmp/tmp.kokPnZyxVg + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.hzgVPPt15y/find-2nd + run_restore backup-azure-blob + local backup_name=backup-azure-blob + /usr/bin/sed -e 's/name:/name: restore-backup-azure-blob/' + /usr/bin/sed -e 's/backupName:/backupName: backup-azure-blob/' + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/restore.yml ++ mktemp + local LAST_OUT=/tmp/tmp.nwk910JnzL ++ mktemp + local LAST_ERR=/tmp/tmp.WgDL5YFKWn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nwk910JnzL perconaservermongodbrestore.psmdb.percona.com/restore-backup-azure-blob created + cat /tmp/tmp.WgDL5YFKWn + rm /tmp/tmp.nwk910JnzL /tmp/tmp.WgDL5YFKWn + return 0 + wait_restore backup-azure-blob some-name + local backup_name=backup-azure-blob + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace waiting psmdb-restore/restore-backup-azure-blob to reach ready state......OK + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for 
cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lvgBZJkjS2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.U2YxYeoPLM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.lvgBZJkjS2 ++ cat /tmp/tmp.U2YxYeoPLM ++ rm /tmp/tmp.lvgBZJkjS2 /tmp/tmp.U2YxYeoPLM ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:33:34+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ykg2WGYKr6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.v56tvRdZDe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Ykg2WGYKr6 ++ cat /tmp/tmp.v56tvRdZDe ++ rm /tmp/tmp.Ykg2WGYKr6 /tmp/tmp.v56tvRdZDe ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.JBxoQKvDsp ++ mktemp + local LAST_ERR=/tmp/tmp.DUDTVGVAwk + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JBxoQKvDsp + cat /tmp/tmp.DUDTVGVAwk + rm /tmp/tmp.JBxoQKvDsp /tmp/tmp.DUDTVGVAwk + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find 
myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:33:38+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local LAST_OUT=/tmp/tmp.WtmhuQhrNc +++ mktemp ++ local LAST_ERR=/tmp/tmp.dtKvSASUZF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WtmhuQhrNc ++ cat /tmp/tmp.dtKvSASUZF ++ rm /tmp/tmp.WtmhuQhrNc /tmp/tmp.dtKvSASUZF ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.lnK1zw8z3d ++ mktemp + local LAST_ERR=/tmp/tmp.j68tVy2Ly4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lnK1zw8z3d + cat /tmp/tmp.j68tVy2Ly4 + rm /tmp/tmp.lnK1zw8z3d /tmp/tmp.j68tVy2Ly4 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:33:40+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uUiy3m31z9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Wbu1Qd6OS6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.uUiy3m31z9 ++ cat /tmp/tmp.Wbu1Qd6OS6 ++ rm /tmp/tmp.uUiy3m31z9 /tmp/tmp.Wbu1Qd6OS6 ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.HbCp0zzfnY ++ mktemp + local LAST_ERR=/tmp/tmp.1U8CB1hFJ1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.HbCp0zzfnY + cat /tmp/tmp.1U8CB1hFJ1 + rm /tmp/tmp.HbCp0zzfnY /tmp/tmp.1U8CB1hFJ1 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + desc 'check backup and restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- minio ----------------------------------------------------------------------------------- ++ get_backup_dest backup-minio ++ local backup_name=backup-minio ++ sed -e 's/.json$//' ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ sed 's|azure://||' ++ sed 's|s3://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2CJ47BOdIE +++ mktemp ++ local LAST_ERR=/tmp/tmp.FKXgzxrs85 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2CJ47BOdIE ++ cat /tmp/tmp.FKXgzxrs85 ++ rm /tmp/tmp.2CJ47BOdIE /tmp/tmp.FKXgzxrs85 ++ return 0 + backup_dest_minio=operator-testing/2025-05-20T08:31:27Z + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws 
--endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2025-05-20T08:31:27Z/rs0/ + grep myApp.test.gz ++ mktemp + local LAST_OUT=/tmp/tmp.1rQS831DYK ++ mktemp + local LAST_ERR=/tmp/tmp.j019MOIYYD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2025-05-20T08:31:27Z/rs0/ + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1rQS831DYK + cat /tmp/tmp.j019MOIYYD If you don't see a command prompt, try pressing enter. warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-11568 + rm /tmp/tmp.1rQS831DYK /tmp/tmp.j019MOIYYD + return 0 2025-05-20 08:31:31 55 myApp.test.gz + run_recovery_check backup-minio some-name-rs0 + local backup=backup-minio + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FlWUdRhZf6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.hLx5jjTf1t ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FlWUdRhZf6 ++ cat /tmp/tmp.hLx5jjTf1t ++ rm /tmp/tmp.FlWUdRhZf6 /tmp/tmp.hLx5jjTf1t ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.ZQV9dmwgPz ++ mktemp + local LAST_ERR=/tmp/tmp.3IY1wtDmor + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ZQV9dmwgPz Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("95ef1530-43df-489d-a1af-a6f1437b3821") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.3IY1wtDmor + rm /tmp/tmp.ZQV9dmwgPz 
/tmp/tmp.3IY1wtDmor + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-11568 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:33:54+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-11568 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AQ6mKZqgms +++ mktemp ++ local LAST_ERR=/tmp/tmp.uNyENUyykZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AQ6mKZqgms ++ cat /tmp/tmp.uNyENUyykZ ++ rm /tmp/tmp.AQ6mKZqgms /tmp/tmp.uNyENUyykZ ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.6khp89qJ1x ++ mktemp + local LAST_ERR=/tmp/tmp.a54W2rTT4t + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6khp89qJ1x + cat /tmp/tmp.a54W2rTT4t + rm /tmp/tmp.6khp89qJ1x /tmp/tmp.a54W2rTT4t + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.hzgVPPt15y/find-2nd + run_restore backup-minio + local backup_name=backup-minio + kubectl_bin apply -f - ++ mktemp + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/restore.yml + /usr/bin/sed -e 's/name:/name: restore-backup-minio/' + /usr/bin/sed -e 's/backupName:/backupName: backup-minio/' + local LAST_OUT=/tmp/tmp.zd1lDvvflN ++ mktemp + local LAST_ERR=/tmp/tmp.xG3uTKkVjJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zd1lDvvflN perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio created + cat /tmp/tmp.xG3uTKkVjJ + rm /tmp/tmp.zd1lDvvflN /tmp/tmp.xG3uTKkVjJ + return 0 + wait_restore backup-minio some-name + local backup_name=backup-minio + local cluster_name=some-name + local 
target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace waiting psmdb-restore/restore-backup-minio to reach ready state.....OK + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readiness' waiting for cluster readiness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hOxPrAMur2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.n9OtGQEOxK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hOxPrAMur2 ++ cat /tmp/tmp.n9OtGQEOxK ++ rm /tmp/tmp.hOxPrAMur2 /tmp/tmp.n9OtGQEOxK ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:34:22+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fDjydErp1w +++ mktemp ++ local LAST_ERR=/tmp/tmp.VlyZRbYmXy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fDjydErp1w ++ cat /tmp/tmp.VlyZRbYmXy ++ rm /tmp/tmp.fDjydErp1w /tmp/tmp.VlyZRbYmXy ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.hN1QEIII5Q ++ mktemp + local LAST_ERR=/tmp/tmp.XvoSZGNXjm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break +
cat /tmp/tmp.hN1QEIII5Q + cat /tmp/tmp.XvoSZGNXjm + rm /tmp/tmp.hN1QEIII5Q /tmp/tmp.XvoSZGNXjm + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:34:26+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7s3KcFMLax +++ mktemp ++ local LAST_ERR=/tmp/tmp.wEXm7LsQK9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7s3KcFMLax ++ cat /tmp/tmp.wEXm7LsQK9 ++ rm /tmp/tmp.7s3KcFMLax /tmp/tmp.wEXm7LsQK9 ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.fSwqZTbmEO ++ mktemp + local LAST_ERR=/tmp/tmp.omzAoof6mi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fSwqZTbmEO + cat /tmp/tmp.omzAoof6mi + rm /tmp/tmp.fSwqZTbmEO /tmp/tmp.omzAoof6mi + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:34:30+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.NFCS73GITx +++ mktemp ++ local LAST_ERR=/tmp/tmp.nepkfFCwC3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NFCS73GITx ++ cat /tmp/tmp.nepkfFCwC3 ++ rm /tmp/tmp.NFCS73GITx /tmp/tmp.nepkfFCwC3 ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.LbzGhntjcl ++ mktemp + local LAST_ERR=/tmp/tmp.rgPoks9UTj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LbzGhntjcl + cat /tmp/tmp.rgPoks9UTj + rm /tmp/tmp.LbzGhntjcl /tmp/tmp.rgPoks9UTj + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + run_mongo 'use myApp\n db.dropUser("test1user")' userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.dropUser("test1user")' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nANsPv3oDn +++ mktemp ++ local LAST_ERR=/tmp/tmp.3ZVVamEZWh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nANsPv3oDn ++ cat /tmp/tmp.3ZVVamEZWh ++ rm /tmp/tmp.nANsPv3oDn /tmp/tmp.3ZVVamEZWh ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.dropUser("test1user")\n'\'' | mongo 
mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.I415QeRmeD ++ mktemp + local LAST_ERR=/tmp/tmp.Ug1sSFYa4d + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.dropUser("test1user")\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.I415QeRmeD Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("db18f3ca-e72a-4b6b-a3e5-76eea99344a0") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.Ug1sSFYa4d + rm /tmp/tmp.I415QeRmeD /tmp/tmp.Ug1sSFYa4d + return 0 + run_mongo 'use myApp\n db.dropRole("test1role")' userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.dropRole("test1role")' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4bLSjvP2HO +++ mktemp ++ local LAST_ERR=/tmp/tmp.8A5KkBoypy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4bLSjvP2HO ++ cat /tmp/tmp.8A5KkBoypy ++ rm /tmp/tmp.4bLSjvP2HO /tmp/tmp.8A5KkBoypy ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.dropRole("test1role")\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.w9UnUqv33h ++ mktemp + local LAST_ERR=/tmp/tmp.qEICvjHVBn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.dropRole("test1role")\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.w9UnUqv33h Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("f3668044-3709-45ce-b276-46346e1de60d") } Percona Server for 
MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.qEICvjHVBn + rm /tmp/tmp.w9UnUqv33h /tmp/tmp.qEICvjHVBn + return 0 + desc 'selective restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- selective restore -- minio ----------------------------------------------------------------------------------- + run_recovery_check_selective backup-minio some-name-rs0 myApp test2 false test1user test1role + local backup=backup-minio + local cluster=some-name-rs0 + local database=myApp + local collection=test2 + local with_users_and_roles=false + local username=test1user + local role=test1role + restore_name=restore-backup-minio-selective + run_mongo 'use myApp\n db.test2.drop()' myApp:myPass@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.test2.drop()' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X7kYCLZlA6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.wAux9uI2rb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.X7kYCLZlA6 ++ cat /tmp/tmp.wAux9uI2rb ++ rm /tmp/tmp.X7kYCLZlA6 /tmp/tmp.wAux9uI2rb ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.jRRlfSPnBD ++ mktemp + local LAST_ERR=/tmp/tmp.wptZHJBxqQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jRRlfSPnBD Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("f3af6f41-dcf0-47d9-a6d5-ffc2ebd45147") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.wptZHJBxqQ + rm /tmp/tmp.jRRlfSPnBD /tmp/tmp.wptZHJBxqQ + return 0 ++ collection_exists test2 ++ local collection=test2 ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-11568 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-11568 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' ++ grep -v 'switched to' ++ jq 'index("test2") != null' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Ueo8I8cjCO ++++ mktemp +++ local LAST_ERR=/tmp/tmp.86hTIzD17i +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.Ueo8I8cjCO +++ cat /tmp/tmp.86hTIzD17i +++ rm /tmp/tmp.Ueo8I8cjCO /tmp/tmp.86hTIzD17i +++ return 0 ++ local client_container=psmdb-client-66f577db5f-4pv85 ++ local mongo_flag=--quiet ++ [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GzJxFg4H0J +++ mktemp ++ local LAST_ERR=/tmp/tmp.g3sMNpAivn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.GzJxFg4H0J ++ cat /tmp/tmp.g3sMNpAivn ++ rm /tmp/tmp.GzJxFg4H0J /tmp/tmp.g3sMNpAivn ++ return 0 + [[ false == \t\r\u\e ]] + yq '.spec.backupName="backup-minio"' + yq '.spec.selective.namespaces[0]="myApp.test"' + yq .spec.selective.withUsersAndRoles=false + kubectl_bin apply -f - + yq '.metadata.name="restore-backup-minio-selective"' + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/restore.yml ++ mktemp + local LAST_OUT=/tmp/tmp.yU8aXlHzDH ++ mktemp + local LAST_ERR=/tmp/tmp.K2jNLHPVf7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yU8aXlHzDH perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-selective created + cat /tmp/tmp.K2jNLHPVf7 + rm /tmp/tmp.yU8aXlHzDH /tmp/tmp.K2jNLHPVf7 + return 0 + wait_restore backup-minio-selective some-name + local backup_name=backup-minio-selective + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace waiting psmdb-restore/restore-backup-minio-selective to reach ready state.....OK + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readiness' waiting for cluster readiness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JEO8Q1ddDs +++ mktemp ++ local LAST_ERR=/tmp/tmp.xbcdvQlNpU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JEO8Q1ddDs ++ cat /tmp/tmp.xbcdvQlNpU ++ rm /tmp/tmp.JEO8Q1ddDs /tmp/tmp.xbcdvQlNpU ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo ++ collection_exists test2 ++ local collection=test2 ++ run_mongo 'use myApp\n
JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-11568 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-11568 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ grep -v 'switched to' ++ jq 'index("test2") != null' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.2Zjc5Z1X2Q ++++ mktemp +++ local LAST_ERR=/tmp/tmp.5hrIFmf9Ic +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.2Zjc5Z1X2Q +++ cat /tmp/tmp.5hrIFmf9Ic +++ rm /tmp/tmp.2Zjc5Z1X2Q /tmp/tmp.5hrIFmf9Ic +++ return 0 ++ local client_container=psmdb-client-66f577db5f-4pv85 ++ local mongo_flag=--quiet ++ [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uaBUC0Ojv2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.KQgrZZkiNU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.uaBUC0Ojv2 ++ cat /tmp/tmp.KQgrZZkiNU ++ rm /tmp/tmp.uaBUC0Ojv2 /tmp/tmp.KQgrZZkiNU ++ return 0 + [[ false == \t\r\u\e ]] + [[ false == \t\r\u\e ]] + [[ false == \f\a\l\s\e ]] ++ user_exists test1user ++ local username=test1user ++ jq 'any(.[]; ._id==myApp.test1user)' ++ run_mongo 'use myApp\n JSON.stringify(db.getUsers())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getUsers())' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ grep -v 'switched to' jq: error: myApp/0 is not defined at , line 1: any(.[]; ._id==myApp.test1user) jq: 1 compile error +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.NhxbClbbbw ++++ mktemp +++ local LAST_ERR=/tmp/tmp.XOj1ngsYbg +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.NhxbClbbbw +++ cat /tmp/tmp.XOj1ngsYbg +++ rm /tmp/tmp.NhxbClbbbw /tmp/tmp.XOj1ngsYbg +++ return 0 ++ local client_container=psmdb-client-66f577db5f-4pv85 ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo 
mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IWsDY2CPVu +++ mktemp ++ local LAST_ERR=/tmp/tmp.Kyy1ndOPgO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IWsDY2CPVu ++ cat /tmp/tmp.Kyy1ndOPgO ++ rm /tmp/tmp.IWsDY2CPVu /tmp/tmp.Kyy1ndOPgO ++ return 0 + [[ '' == \t\r\u\e ]] ++ role_exists test1role ++ local role=test1role ++ run_mongo 'use myApp\n JSON.stringify(db.getRoles())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getRoles())' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ jq 'any(.[]; ._id==myApp.test1role)' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ grep -v 'switched to' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.pjP3rhsYrQ jq: error: myApp/0 is not defined at , line 1: any(.[]; ._id==myApp.test1role) ++++ mktemp +++ local LAST_ERR=/tmp/tmp.koYPyJRWmH +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 jq: 1 compile error +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.pjP3rhsYrQ +++ cat /tmp/tmp.koYPyJRWmH +++ rm /tmp/tmp.pjP3rhsYrQ /tmp/tmp.koYPyJRWmH +++ return 0 ++ local client_container=psmdb-client-66f577db5f-4pv85 ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QKTGZPa0HS +++ mktemp ++ local LAST_ERR=/tmp/tmp.uUVLZladPC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QKTGZPa0HS ++ cat /tmp/tmp.uUVLZladPC ++ rm /tmp/tmp.QKTGZPa0HS /tmp/tmp.uUVLZladPC ++ return 0 + [[ '' == \t\r\u\e ]] + kubectl_bin delete psmdb-restore restore-backup-minio-selective ++ mktemp + local LAST_OUT=/tmp/tmp.sVMIMJW1jQ ++ mktemp + local LAST_ERR=/tmp/tmp.lRciVyJRbK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb-restore restore-backup-minio-selective + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sVMIMJW1jQ perconaservermongodbrestore.psmdb.percona.com "restore-backup-minio-selective" deleted + cat /tmp/tmp.lRciVyJRbK + rm /tmp/tmp.sVMIMJW1jQ /tmp/tmp.lRciVyJRbK + return 0 + run_mongo 'use myApp\n 
db.test2.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.test2.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MY1zxJshhA +++ mktemp ++ local LAST_ERR=/tmp/tmp.vnnK75mGkY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MY1zxJshhA ++ cat /tmp/tmp.vnnK75mGkY ++ rm /tmp/tmp.MY1zxJshhA /tmp/tmp.vnnK75mGkY ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.VSu6P2t7Zv ++ mktemp + local LAST_ERR=/tmp/tmp.GWFVi6Hc19 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VSu6P2t7Zv Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("3ab604cc-286d-461d-8d52-f7b8aa4c6ff7") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.GWFVi6Hc19 + rm /tmp/tmp.VSu6P2t7Zv /tmp/tmp.GWFVi6Hc19 + return 0 + desc 'selective restore with users and roles -- minio' + set +o xtrace ----------------------------------------------------------------------------------- selective restore with users and roles -- minio ----------------------------------------------------------------------------------- + run_recovery_check_selective backup-minio some-name-rs0 myApp test2 true test1user test1role + local backup=backup-minio + local cluster=some-name-rs0 + local database=myApp + local collection=test2 + local with_users_and_roles=true + local username=test1user + local role=test1role + restore_name=restore-backup-minio-selective + run_mongo 'use myApp\n db.test2.drop()' myApp:myPass@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.test2.drop()' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vTpvERz8Z3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.SPhohnzYJR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set 
+e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vTpvERz8Z3 ++ cat /tmp/tmp.SPhohnzYJR ++ rm /tmp/tmp.vTpvERz8Z3 /tmp/tmp.SPhohnzYJR ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.nll5AGNwYx ++ mktemp + local LAST_ERR=/tmp/tmp.5fLmi1LYcz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nll5AGNwYx Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("d8751f88-dc13-48fa-81bf-e93c559e8414") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.5fLmi1LYcz + rm /tmp/tmp.nll5AGNwYx /tmp/tmp.5fLmi1LYcz + return 0 ++ collection_exists test2 ++ local collection=test2 ++ grep -v 'switched to' ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-11568 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-11568 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp ++ jq 'index("test2") != null' +++ local LAST_OUT=/tmp/tmp.C1VHiF7ZSy ++++ mktemp +++ local LAST_ERR=/tmp/tmp.qCzzmVnhnK +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.C1VHiF7ZSy +++ cat /tmp/tmp.qCzzmVnhnK +++ rm /tmp/tmp.C1VHiF7ZSy /tmp/tmp.qCzzmVnhnK +++ return 0 ++ local client_container=psmdb-client-66f577db5f-4pv85 ++ local mongo_flag=--quiet ++ [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DhfeBTm34B +++ mktemp ++ local LAST_ERR=/tmp/tmp.rHxL2MUE2m ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo 
mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DhfeBTm34B ++ cat /tmp/tmp.rHxL2MUE2m ++ rm /tmp/tmp.DhfeBTm34B /tmp/tmp.rHxL2MUE2m ++ return 0 + [[ false == \t\r\u\e ]] + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/restore.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.myV31Ex5Gt ++ mktemp + local LAST_ERR=/tmp/tmp.HKCXi2BwyE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + yq '.spec.selective.namespaces[0]="myApp.test"' + kubectl apply -f - + yq '.metadata.name="restore-backup-minio-selective"' + yq '.spec.backupName="backup-minio"' + yq .spec.selective.withUsersAndRoles=true + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.myV31Ex5Gt perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-selective created + cat /tmp/tmp.HKCXi2BwyE + rm /tmp/tmp.myV31Ex5Gt /tmp/tmp.HKCXi2BwyE + return 0 + wait_restore backup-minio-selective some-name + local backup_name=backup-minio-selective + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace waiting psmdb-restore/restore-backup-minio-selective to reach ready state....OK + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readiness' waiting for cluster readiness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mxqdxdOjQM +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ml24Uv8Ukk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mxqdxdOjQM ++ cat /tmp/tmp.Ml24Uv8Ukk ++ rm /tmp/tmp.mxqdxdOjQM /tmp/tmp.Ml24Uv8Ukk ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo ++ collection_exists test2 ++ local collection=test2 ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-11568 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ grep -v 'switched to' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-11568 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ jq 'index("test2") != null' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.4Jk34cl7Yl ++++ mktemp +++ local LAST_ERR=/tmp/tmp.vE19kMFeYG +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.4Jk34cl7Yl +++ cat /tmp/tmp.vE19kMFeYG +++ rm /tmp/tmp.4Jk34cl7Yl /tmp/tmp.vE19kMFeYG +++ return 0 ++ local client_container=psmdb-client-66f577db5f-4pv85 ++ local mongo_flag=--quiet ++ [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet'
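Note on the "jq: 1 compile error" messages interleaved in this trace (above, and again just below): the user_exists and role_exists checks pipe the db.getUsers()/db.getRoles() JSON into jq with the filter any(.[]; ._id==myApp.test1user), interpolating the name unquoted, so jq parses myApp as a call to an undefined zero-argument function and aborts with "jq: error: myApp/0 is not defined". The checks therefore print nothing, and the subsequent [[ '' == \t\r\u\e ]] / [[ '' == \f\a\l\s\e ]] comparisons run against an empty string. A minimal corrected sketch (the helper shape is inferred from this trace, not taken from the test source) binds the id as a jq string variable so the comparison is a real string match:

    # sketch of a fixed user_exists; role_exists is analogous with db.getRoles() and "myApp.$role"
    user_exists() {
        local username=$1
        run_mongo 'use myApp\n JSON.stringify(db.getUsers())' \
            "userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568" '' '' --quiet \
            | grep -v 'switched to' \
            | jq --arg id "myApp.$username" 'any(.[]; ._id == $id)'   # prints true or false
    }

With the unquoted form the filter never compiles, which is why both the with-users-and-roles and without runs below report the same empty result.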
+++ mktemp ++ local LAST_OUT=/tmp/tmp.v3I8V0hI3N +++ mktemp ++ local LAST_ERR=/tmp/tmp.Srn9oBrgpc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.v3I8V0hI3N ++ cat /tmp/tmp.Srn9oBrgpc ++ rm /tmp/tmp.v3I8V0hI3N /tmp/tmp.Srn9oBrgpc ++ return 0 + [[ false == \t\r\u\e ]] + [[ true == \t\r\u\e ]] ++ user_exists test1user ++ local username=test1user ++ grep -v 'switched to' ++ jq 'any(.[]; ._id==myApp.test1user)' ++ run_mongo 'use myApp\n JSON.stringify(db.getUsers())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getUsers())' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local jq: error: myApp/0 is not defined at , line 1: any(.[]; ._id==myApp.test1user) jq: 1 compile error +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.OQOqDgLpiE ++++ mktemp +++ local LAST_ERR=/tmp/tmp.hJOCubqKRt +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.OQOqDgLpiE +++ cat /tmp/tmp.hJOCubqKRt +++ rm /tmp/tmp.OQOqDgLpiE /tmp/tmp.hJOCubqKRt +++ return 0 ++ local client_container=psmdb-client-66f577db5f-4pv85 ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E1pOxSxUU2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Jvbz2veiAR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.E1pOxSxUU2 ++ cat /tmp/tmp.Jvbz2veiAR ++ rm /tmp/tmp.E1pOxSxUU2 /tmp/tmp.Jvbz2veiAR ++ return 0 + [[ '' == \f\a\l\s\e ]] ++ role_exists test1role ++ local role=test1role ++ run_mongo 'use myApp\n JSON.stringify(db.getRoles())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 '' '' --quiet ++ grep -v 'switched to' ++ local 'command=use myApp\n JSON.stringify(db.getRoles())' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ jq 'any(.[]; ._id==myApp.test1role)' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp jq: error: myApp/0 is not defined at , line 1: any(.[]; ._id==myApp.test1role) jq: 1 compile error +++ local LAST_OUT=/tmp/tmp.JWu60M6NuS ++++ 
mktemp +++ local LAST_ERR=/tmp/tmp.KjpjoZHqrm +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.JWu60M6NuS +++ cat /tmp/tmp.KjpjoZHqrm +++ rm /tmp/tmp.JWu60M6NuS /tmp/tmp.KjpjoZHqrm +++ return 0 ++ local client_container=psmdb-client-66f577db5f-4pv85 ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rvtOwzCVHJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.J8BZqUhH0N ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rvtOwzCVHJ ++ cat /tmp/tmp.J8BZqUhH0N ++ rm /tmp/tmp.rvtOwzCVHJ /tmp/tmp.J8BZqUhH0N ++ return 0 + [[ '' == \f\a\l\s\e ]] + desc 'restore from backup source, with storageName -- minio' + set +o xtrace ----------------------------------------------------------------------------------- restore from backup source, with storageName -- minio ----------------------------------------------------------------------------------- ++ get_backup_dest backup-minio ++ local backup_name=backup-minio ++ sed 's|s3://||' ++ sed -e 's/.json$//' ++ sed 's|azure://||' ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uLeNCB8Vy9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.YjgbhHy7dz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.uLeNCB8Vy9 ++ cat /tmp/tmp.YjgbhHy7dz ++ rm /tmp/tmp.uLeNCB8Vy9 /tmp/tmp.YjgbhHy7dz ++ return 0 + backup_dest_minio=operator-testing/2025-05-20T08:31:27Z + run_recovery_check_bkp_source backup-minio operator-testing/2025-05-20T08:31:27Z some-name-rs0 backup-minio-source-0 + local backup=backup-minio + local backup_dest=operator-testing/2025-05-20T08:31:27Z + local cluster=some-name-rs0 + local source=backup-minio-source-0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4yuuR2ab8D +++ mktemp ++ local LAST_ERR=/tmp/tmp.OTFRqQwkWJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4yuuR2ab8D ++ cat /tmp/tmp.OTFRqQwkWJ ++ 
rm /tmp/tmp.4yuuR2ab8D /tmp/tmp.OTFRqQwkWJ ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.yRLLTTIBFF ++ mktemp + local LAST_ERR=/tmp/tmp.jNdpaSkv6Z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yRLLTTIBFF Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("cbe8879d-1f3c-46fa-8e48-9906f83919e2") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.jNdpaSkv6Z + rm /tmp/tmp.yRLLTTIBFF /tmp/tmp.jNdpaSkv6Z + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-11568 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:36:09+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-11568 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2slsnBtoNl +++ mktemp ++ local LAST_ERR=/tmp/tmp.8HEYNJAlLP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2slsnBtoNl ++ cat /tmp/tmp.8HEYNJAlLP ++ rm /tmp/tmp.2slsnBtoNl /tmp/tmp.8HEYNJAlLP ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.NSZvPa4tR7 ++ mktemp + local LAST_ERR=/tmp/tmp.SslrQcasLg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.NSZvPa4tR7 + cat /tmp/tmp.SslrQcasLg + rm /tmp/tmp.NSZvPa4tR7 /tmp/tmp.SslrQcasLg + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.hzgVPPt15y/find-2nd + run_restore_backupsource backup-minio-source-0 operator-testing/2025-05-20T08:31:27Z + local backupName=backup-minio-source-0 + local backupDest=operator-testing/2025-05-20T08:31:27Z + local storageName= + desc 'run restore restore-backup-minio-source-0 from backup backup-minio-source-0 destination is operator-testing/2025-05-20T08:31:27Z' + set +o xtrace ----------------------------------------------------------------------------------- run restore restore-backup-minio-source-0 from backup backup-minio-source-0 destination is operator-testing/2025-05-20T08:31:27Z ----------------------------------------------------------------------------------- + '[' -z '' ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/restore-backupsource.yml + /usr/bin/sed -e 's/name:/name: restore-backup-minio-source-0/' + /usr/bin/sed -e 's|BACKUP-NAME|operator-testing/2025-05-20T08:31:27Z|' + /usr/bin/sed -e /storageName/d + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.BZ5brIMEct ++ mktemp + local LAST_ERR=/tmp/tmp.0mXzAl7X9Y + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' 
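
# [editor's note] The restore above is created by sed-templating
# e2e-tests/demand-backup/conf/restore-backupsource.yml: the object name is stamped
# in, the BACKUP-NAME placeholder becomes the destination path, and the storageName
# line is deleted because no storage was passed. The template file itself is not
# shown in this log, so the manifest below is a plausible reconstruction based on
# the operator's public PerconaServerMongoDBRestore CRD, not a copy of the suite's
# file:
cat <<EOF | kubectl apply -f -
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-minio-source-0
spec:
  clusterName: some-name
  backupSource:
    destination: s3://operator-testing/2025-05-20T08:31:27Z
EOF
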
+ break + cat /tmp/tmp.BZ5brIMEct perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-source-0 created + cat /tmp/tmp.0mXzAl7X9Y + rm /tmp/tmp.BZ5brIMEct /tmp/tmp.0mXzAl7X9Y + return 0 + return + wait_restore backup-minio-source-0 some-name + local backup_name=backup-minio-source-0 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace waiting psmdb-restore/restore-backup-minio-source-0 to reach ready state.......OK + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FaqgC7TwTw +++ mktemp ++ local LAST_ERR=/tmp/tmp.y2fJKTMFAd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FaqgC7TwTw ++ cat /tmp/tmp.y2fJKTMFAd ++ rm /tmp/tmp.FaqgC7TwTw /tmp/tmp.y2fJKTMFAd ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:36:33+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.97UHZ0JgLg +++ mktemp ++ local LAST_ERR=/tmp/tmp.wW1NFQMVSf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.97UHZ0JgLg ++ cat /tmp/tmp.wW1NFQMVSf ++ rm /tmp/tmp.97UHZ0JgLg /tmp/tmp.wW1NFQMVSf ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.e6vq6NEMoe ++ mktemp + local LAST_ERR=/tmp/tmp.fJvwvVGy6V + local exit_status=0 + local 
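
# [editor's note] `wait_cluster_consistency` above boils down to polling
# `.status.state` on the psmdb object until it reports "ready". A minimal
# re-derivation of that loop (the 7s sleep and the wait_time=32 cap both appear
# in the trace; the exact failure handling is an assumption):
retry=0
until [[ "$(kubectl get psmdb some-name -o 'jsonpath={.status.state}')" == "ready" ]]; do
    (( retry++ >= 32 )) && { echo "cluster never became ready" >&2; exit 1; }
    echo -n .
    sleep 7
done
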
timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.e6vq6NEMoe + cat /tmp/tmp.fJvwvVGy6V + rm /tmp/tmp.e6vq6NEMoe /tmp/tmp.fJvwvVGy6V + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:36:37+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VAoaVWUedZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.CCLkWSUfT7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VAoaVWUedZ ++ cat /tmp/tmp.CCLkWSUfT7 ++ rm /tmp/tmp.VAoaVWUedZ /tmp/tmp.CCLkWSUfT7 ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Y2yQjNZntY ++ mktemp + local LAST_ERR=/tmp/tmp.SrbEDGjtCD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Y2yQjNZntY + cat /tmp/tmp.SrbEDGjtCD + rm /tmp/tmp.Y2yQjNZntY /tmp/tmp.SrbEDGjtCD + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + 
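
# [editor's note] Each compare_mongo_cmd pass above normalizes the shell output
# before diffing it against a golden file: known-noisy mongo shell lines are
# dropped and volatile tokens (ObjectIds, namespace ordinals in hostnames) are
# scrubbed. A condensed sketch of that pipeline; the filter strings are copied
# from the trace, while the helper name and $tmp_dir are mine:
normalize_mongo_output() {
    egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match' \
        | sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
}
# run_mongo 'use myApp\n db.test.find()' ... | normalize_mongo_output > "$tmp_dir/find"
# diff -u .../demand-backup/compare/find.json "$tmp_dir/find"
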
local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:36:40+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ mktemp ++ local LAST_OUT=/tmp/tmp.la78o8l1th +++ mktemp ++ local LAST_ERR=/tmp/tmp.J97x8bbrRm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.la78o8l1th ++ cat /tmp/tmp.J97x8bbrRm ++ rm /tmp/tmp.la78o8l1th /tmp/tmp.J97x8bbrRm ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.TgXDZOXKpN ++ mktemp + local LAST_ERR=/tmp/tmp.AaScH5CQar + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TgXDZOXKpN + cat /tmp/tmp.AaScH5CQar + rm /tmp/tmp.TgXDZOXKpN /tmp/tmp.AaScH5CQar + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + desc 'restore from backup source, no storageName -- minio' + set +o xtrace ----------------------------------------------------------------------------------- restore from backup source, no storageName -- minio ----------------------------------------------------------------------------------- ++ get_backup_dest backup-minio ++ local backup_name=backup-minio ++ sed 's|s3://||' ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' +++ mktemp ++ sed -e 's/.json$//' ++ sed 's|azure://||' ++ local LAST_OUT=/tmp/tmp.I9uXsHVdhh +++ mktemp ++ local LAST_ERR=/tmp/tmp.qzWqgq3iAw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.I9uXsHVdhh ++ cat /tmp/tmp.qzWqgq3iAw ++ rm /tmp/tmp.I9uXsHVdhh /tmp/tmp.qzWqgq3iAw ++ return 0 + backup_dest_minio=operator-testing/2025-05-20T08:31:27Z + run_recovery_check_bkp_source 
backup-minio operator-testing/2025-05-20T08:31:27Z some-name-rs0 backup-minio-source-1 + local backup=backup-minio + local backup_dest=operator-testing/2025-05-20T08:31:27Z + local cluster=some-name-rs0 + local source=backup-minio-source-1 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-11568 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HQeSVrpY9p +++ mktemp ++ local LAST_ERR=/tmp/tmp.ziX0yhcHgI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HQeSVrpY9p ++ cat /tmp/tmp.ziX0yhcHgI ++ rm /tmp/tmp.HQeSVrpY9p /tmp/tmp.ziX0yhcHgI ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.BYzfAeEfwE ++ mktemp + local LAST_ERR=/tmp/tmp.B7oQQXccX4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.BYzfAeEfwE Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("f0585969-f647-45b6-a401-dbf0ce74ded4") } Percona Server for MongoDB server version: v8.0.8-3 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.B7oQQXccX4 + rm /tmp/tmp.BYzfAeEfwE /tmp/tmp.B7oQQXccX4 + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-11568 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:36:47+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-11568 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ml6SQ3nB14 +++ mktemp ++ local LAST_ERR=/tmp/tmp.uDupBICuNm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ml6SQ3nB14 ++ cat /tmp/tmp.uDupBICuNm ++ rm /tmp/tmp.ml6SQ3nB14 /tmp/tmp.uDupBICuNm ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.FRFaGQiaDU ++ mktemp + local LAST_ERR=/tmp/tmp.WR0dMWjguM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FRFaGQiaDU + cat /tmp/tmp.WR0dMWjguM + rm /tmp/tmp.FRFaGQiaDU /tmp/tmp.WR0dMWjguM + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.hzgVPPt15y/find-2nd + run_restore_backupsource backup-minio-source-1 operator-testing/2025-05-20T08:31:27Z + local backupName=backup-minio-source-1 + local backupDest=operator-testing/2025-05-20T08:31:27Z + local storageName= + desc 'run restore restore-backup-minio-source-1 from backup backup-minio-source-1 destination is operator-testing/2025-05-20T08:31:27Z' + set +o xtrace ----------------------------------------------------------------------------------- run restore restore-backup-minio-source-1 from backup backup-minio-source-1 destination is operator-testing/2025-05-20T08:31:27Z ----------------------------------------------------------------------------------- + '[' -z '' ']' + /usr/bin/sed -e /storageName/d + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/restore-backupsource.yml + /usr/bin/sed -e 's/name:/name: restore-backup-minio-source-1/' + kubectl_bin apply -f - + /usr/bin/sed -e 's|BACKUP-NAME|operator-testing/2025-05-20T08:31:27Z|' ++ mktemp + local LAST_OUT=/tmp/tmp.AjSOzZHhkP ++ mktemp + local LAST_ERR=/tmp/tmp.o4bB5JhJEF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' 
+ break + cat /tmp/tmp.AjSOzZHhkP perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-source-1 created + cat /tmp/tmp.o4bB5JhJEF + rm /tmp/tmp.AjSOzZHhkP /tmp/tmp.o4bB5JhJEF + return 0 + return + wait_restore backup-minio-source-1 some-name + local backup_name=backup-minio-source-1 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace waiting psmdb-restore/restore-backup-minio-source-1 to reach ready state......OK + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JjMpxTeVjQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.BPxDto9nE1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JjMpxTeVjQ ++ cat /tmp/tmp.BPxDto9nE1 ++ rm /tmp/tmp.JjMpxTeVjQ /tmp/tmp.BPxDto9nE1 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:37:14+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local LAST_OUT=/tmp/tmp.iY12h7BuGo +++ mktemp ++ local LAST_ERR=/tmp/tmp.wivbrsWBwU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iY12h7BuGo ++ cat /tmp/tmp.wivbrsWBwU ++ rm /tmp/tmp.iY12h7BuGo /tmp/tmp.wivbrsWBwU ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.vv1NnkRz0p ++ mktemp + local LAST_ERR=/tmp/tmp.fLkxUwEDRZ + local exit_status=0 + local 
timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vv1NnkRz0p + cat /tmp/tmp.fLkxUwEDRZ + rm /tmp/tmp.vv1NnkRz0p /tmp/tmp.fLkxUwEDRZ + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:37:18+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QWWmBtFh3i +++ mktemp ++ local LAST_ERR=/tmp/tmp.V3d7gUqkkw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QWWmBtFh3i ++ cat /tmp/tmp.V3d7gUqkkw ++ rm /tmp/tmp.QWWmBtFh3i /tmp/tmp.V3d7gUqkkw ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.11xaWTRgZP ++ mktemp + local LAST_ERR=/tmp/tmp.o1pkoEOyyi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.11xaWTRgZP + cat /tmp/tmp.o1pkoEOyyi + rm /tmp/tmp.11xaWTRgZP /tmp/tmp.o1pkoEOyyi + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + 
local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-20T08:37:21+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.y8BTvfFlzS +++ mktemp ++ local LAST_ERR=/tmp/tmp.o09XXyefCY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.y8BTvfFlzS ++ cat /tmp/tmp.o09XXyefCY ++ rm /tmp/tmp.y8BTvfFlzS /tmp/tmp.o09XXyefCY ++ return 0 + local client_container=psmdb-client-66f577db5f-4pv85 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.zfj7Km5YB1 ++ mktemp + local LAST_ERR=/tmp/tmp.2ORPf6ee8L + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-4pv85 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-11568.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zfj7Km5YB1 + cat /tmp/tmp.2ORPf6ee8L + rm /tmp/tmp.zfj7Km5YB1 /tmp/tmp.2ORPf6ee8L + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/compare/find.json /tmp/tmp.hzgVPPt15y/find + desc 'delete backup and check if it is removed from bucket -- minio' + set +o xtrace ----------------------------------------------------------------------------------- delete backup and check if it is removed from bucket -- minio ----------------------------------------------------------------------------------- + kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.ixYzEshjva ++ mktemp + local LAST_ERR=/tmp/tmp.94uuzYcwRd + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ixYzEshjva perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs" deleted perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted + cat /tmp/tmp.94uuzYcwRd + rm /tmp/tmp.ixYzEshjva /tmp/tmp.94uuzYcwRd + return 0 ++ kubectl_bin run 
-i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ grep -c operator-testing/2025-05-20T08:31:27Z ++ cat +++ mktemp ++ local LAST_OUT=/tmp/tmp.ERsNVCW2We +++ mktemp ++ local LAST_ERR=/tmp/tmp.OZh6ZYOvoI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ERsNVCW2We ++ cat /tmp/tmp.OZh6ZYOvoI ++ rm /tmp/tmp.ERsNVCW2We /tmp/tmp.OZh6ZYOvoI ++ return 0 + backup_exists=0 + [[ 0 -eq 1 ]] + '[' -z '' ']' + check_backup_deletion https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-20T08:30:52Z aws-s3 + path=https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-20T08:30:52Z + storage_name=aws-s3 + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-20T08:30:52Z + [[ 403 -eq 403 ]] + check_backup_deletion https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-20T08:31:15Z gcp-cs + path=https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-20T08:31:15Z + storage_name=gcp-cs + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-20T08:31:15Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-20T08:31:15Z + [[ 404 -eq 404 ]] + check_backup_deletion https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-20T08:31:04Z azure-blob + path=https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-20T08:31:04Z + storage_name=azure-blob + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-20T08:31:04Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-20T08:31:04Z + [[ 404 -eq 404 ]] + desc 'checking backup deletion without cr' + set +o xtrace ----------------------------------------------------------------------------------- checking backup deletion without cr ----------------------------------------------------------------------------------- + run_backup minio + local storage=minio + local backup_name=backup-minio + local type=logical + desc 'run backup backup-minio' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-minio" | .spec.storageName = "minio" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/backup-minio.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.tW727bzVNm ++ mktemp + local LAST_ERR=/tmp/tmp.2gpPLnaHDa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl 
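
# [editor's note] check_backup_deletion above probes each remote object with curl
# and reads only the status code: 403 counts as gone on AWS S3 (the private bucket
# denies anonymous reads) and 404 counts as gone on GCS and Azure, with a re-probe
# in between. A sketch of the check as it behaves in this trace (the retry ceiling
# and sleep interval are assumptions; the suite's exact values are not visible
# here):
check_backup_deletion() {
    local path="$1" code retry=0
    while :; do
        code=$(curl -sw '%{http_code}' -o /dev/null "$path")
        [[ $code -eq 403 || $code -eq 404 ]] && return 0
        (( retry++ >= 10 )) && { echo "backup $path was not removed" >&2; return 1; }
        sleep 5
    done
}
# check_backup_deletion https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-20T08:31:15Z
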
apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tW727bzVNm perconaservermongodbbackup.psmdb.percona.com/backup-minio created + cat /tmp/tmp.2gpPLnaHDa + rm /tmp/tmp.tW727bzVNm /tmp/tmp.2gpPLnaHDa + return 0 + '[' -z '' ']' + run_backup aws-s3 + local storage=aws-s3 + local backup_name=backup-aws-s3 + local type=logical + desc 'run backup backup-aws-s3' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-aws-s3 ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-aws-s3" | .spec.storageName = "aws-s3" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/backup-aws-s3.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.UOVIBsR9pe ++ mktemp + local LAST_ERR=/tmp/tmp.bm06XvQ2Y9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UOVIBsR9pe perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created + cat /tmp/tmp.bm06XvQ2Y9 + rm /tmp/tmp.UOVIBsR9pe /tmp/tmp.bm06XvQ2Y9 + return 0 + run_backup gcp-cs + local storage=gcp-cs + local backup_name=backup-gcp-cs + local type=logical + desc 'run backup backup-gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-gcp-cs ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-gcp-cs" | .spec.storageName = "gcp-cs" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/backup-gcp-cs.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.mYl9sJMAIb ++ mktemp + local LAST_ERR=/tmp/tmp.59jGQ7PmOI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mYl9sJMAIb perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs created + cat /tmp/tmp.59jGQ7PmOI + rm /tmp/tmp.mYl9sJMAIb /tmp/tmp.59jGQ7PmOI + return 0 + run_backup azure-blob + local storage=azure-blob + local backup_name=backup-azure-blob + local type=logical + desc 'run backup backup-azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-azure-blob ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-azure-blob" | .spec.storageName = "azure-blob" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/e2e-tests/demand-backup/conf/backup-azure-blob.yml ++ mktemp + local LAST_OUT=/tmp/tmp.ebteXjFPK8 ++ mktemp + local LAST_ERR=/tmp/tmp.Da7eyT7wYf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ebteXjFPK8 perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created + cat /tmp/tmp.Da7eyT7wYf + rm /tmp/tmp.ebteXjFPK8 /tmp/tmp.Da7eyT7wYf + return 0 + wait_backup backup-minio + local backup_name=backup-minio + local target_state=ready + set +o xtrace waiting for backup-minio to reach ready state. 
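
# [editor's note] Each run_backup call above stamps the backup name, storage, and
# type into a per-storage base manifest with yq and applies it. A re-derivation
# consistent with the four yq invocations in this trace ($conf_dir stands for
# e2e-tests/demand-backup/conf; the function body is inferred, not the suite's):
run_backup() {
    local storage="$1" type="${2:-logical}" backup_name="backup-$1"
    yq eval ".metadata.name = \"$backup_name\" | .spec.storageName = \"$storage\" | .spec.type = \"$type\"" \
        "$conf_dir/backup-$storage.yml" | kubectl apply -f -
}
# run_backup minio; run_backup aws-s3; run_backup gcp-cs; run_backup azure-blob
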
+ '[' -z '' ']' + wait_backup backup-aws-s3 + local backup_name=backup-aws-s3 + local target_state=ready + set +o xtrace waiting for backup-aws-s3 to reach ready state......... + wait_backup backup-gcp-cs + local backup_name=backup-gcp-cs + local target_state=ready + set +o xtrace waiting for backup-gcp-cs to reach ready state. + wait_backup backup-azure-blob + local backup_name=backup-azure-blob + local target_state=ready + set +o xtrace waiting for backup-azure-blob to reach ready state.... + kubectl_bin delete psmdb --all ++ mktemp + local LAST_OUT=/tmp/tmp.AzGunNKc02 ++ mktemp + local LAST_ERR=/tmp/tmp.exi3wCj7AU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AzGunNKc02 perconaservermongodb.psmdb.percona.com "some-name" deleted + cat /tmp/tmp.exi3wCj7AU + rm /tmp/tmp.AzGunNKc02 /tmp/tmp.exi3wCj7AU + return 0 + sleep 60 + desc 'delete backup and check if it is removed from bucket -- minio' + set +o xtrace ----------------------------------------------------------------------------------- delete backup and check if it is removed from bucket -- minio ----------------------------------------------------------------------------------- + kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.idiSMn72b4 ++ mktemp + local LAST_ERR=/tmp/tmp.ruQk5RwEsn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.idiSMn72b4 perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs" deleted perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted + cat /tmp/tmp.ruQk5RwEsn + rm /tmp/tmp.idiSMn72b4 /tmp/tmp.ruQk5RwEsn + return 0 ++ grep -c operator-testing/2025-05-20T08:31:27Z ++ kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ cat +++ mktemp ++ local LAST_OUT=/tmp/tmp.4BGFrRqkeu +++ mktemp ++ local LAST_ERR=/tmp/tmp.Za2r8OvfFQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4BGFrRqkeu ++ cat /tmp/tmp.Za2r8OvfFQ ++ rm /tmp/tmp.4BGFrRqkeu /tmp/tmp.Za2r8OvfFQ ++ return 0 + backup_exists=0 + [[ 0 -eq 1 ]] + '[' -z '' ']' + check_backup_deletion https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-20T08:30:52Z aws-s3 + path=https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-20T08:30:52Z + storage_name=aws-s3 + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-20T08:30:52Z + [[ 403 -eq 403 ]] + check_backup_deletion https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-20T08:31:15Z gcp-cs + 
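
# [editor's note] The "waiting for <name> to reach ready state..." dots above come
# from polling the backup object's state. A minimal sketch of such a wait, using
# the same jsonpath style as the rest of this log (the poll interval and the lack
# of a timeout are simplifications, not the suite's exact behavior):
wait_backup() {
    local backup_name="$1"
    echo -n "waiting for $backup_name to reach ready state"
    while [[ "$(kubectl get psmdb-backup "$backup_name" -o 'jsonpath={.status.state}')" != "ready" ]]; do
        echo -n .
        sleep 1
    done
    echo
}
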
path=https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-20T08:31:15Z + storage_name=gcp-cs + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-20T08:31:15Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-20T08:31:15Z + [[ 404 -eq 404 ]] + check_backup_deletion https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-20T08:31:04Z azure-blob + path=https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-20T08:31:04Z + storage_name=azure-blob + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-20T08:31:04Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-20T08:31:04Z + [[ 404 -eq 404 ]] + desc 'check for passwords leak' + set +o xtrace ----------------------------------------------------------------------------------- check for passwords leak ----------------------------------------------------------------------------------- + check_passwords_leak + local secrets + local passwords + local pods ++ kubectl_bin get secrets -o json ++ jq -r '.items[].data | to_entries | .[] | select(.key | (contains("_PASSWORD"))) | .value' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Mndzoz1rmR +++ mktemp ++ local LAST_ERR=/tmp/tmp.LEJYRjJ27M ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Mndzoz1rmR ++ cat /tmp/tmp.LEJYRjJ27M ++ rm /tmp/tmp.Mndzoz1rmR /tmp/tmp.LEJYRjJ27M ++ return 0 + secrets='YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2' + echo secrets=YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 secrets=YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d 
++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo + passwords='backup123456# backup123456%23 clusterAdmin123456 clusterAdmin123456 clusterMonitor123456 clusterMonitor123456 databaseAdmin123456 databaseAdmin123456 userAdmin123456 userAdmin123456 backup123456# clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2' + echo passwords=backup123456# backup123456%23 clusterAdmin123456 clusterAdmin123456 clusterMonitor123456 clusterMonitor123456 databaseAdmin123456 databaseAdmin123456 userAdmin123456 userAdmin123456 backup123456# clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 passwords=backup123456# backup123456%23 clusterAdmin123456 clusterAdmin123456 clusterMonitor123456 clusterMonitor123456 databaseAdmin123456 databaseAdmin123456 userAdmin123456 userAdmin123456 backup123456# clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 ++ awk -F / '{print $2}' ++ kubectl_bin get pods -o name +++ mktemp ++ local LAST_OUT=/tmp/tmp.DSZgSPjTxj +++ mktemp ++ local LAST_ERR=/tmp/tmp.FRhNP6FBU7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods -o name ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DSZgSPjTxj ++ cat /tmp/tmp.FRhNP6FBU7 ++ rm /tmp/tmp.DSZgSPjTxj /tmp/tmp.FRhNP6FBU7 ++ return 0 + pods='minio-service-8967c7f7f-sds8k psmdb-client-66f577db5f-4pv85' + echo pods=minio-service-8967c7f7f-sds8k psmdb-client-66f577db5f-4pv85 pods=minio-service-8967c7f7f-sds8k psmdb-client-66f577db5f-4pv85 + collect_logs demand-backup-11568 + local containers + local count + NS=demand-backup-11568 + for p in '$pods' ++ kubectl_bin -n demand-backup-11568 get pod minio-service-8967c7f7f-sds8k -o 'jsonpath={.spec.containers[*].name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NIHp8nZAMR +++ mktemp ++ local LAST_ERR=/tmp/tmp.mWvQ0b1QjO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl -n demand-backup-11568 get pod minio-service-8967c7f7f-sds8k -o 'jsonpath={.spec.containers[*].name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NIHp8nZAMR ++ cat /tmp/tmp.mWvQ0b1QjO ++ rm /tmp/tmp.NIHp8nZAMR /tmp/tmp.mWvQ0b1QjO ++ return 0 + containers=minio + for c in '$containers' + [[ minio =~ pmm ]] + 
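
# [editor's note] check_passwords_leak above builds its search list in two halves:
# the raw base64 values of every *_PASSWORD secret key (selected via jq) plus
# their decoded plaintext forms, so a leak is caught in either encoding. A
# condensed sketch of that collection step, matching the jq filter and the
# base64 -d loop in the trace:
secrets=$(kubectl get secrets -o json \
    | jq -r '.items[].data | to_entries | .[] | select(.key | contains("_PASSWORD")) | .value')
passwords="$(for i in $secrets; do echo "$i" | base64 -d; echo; done) $secrets"
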
kubectl_bin -n demand-backup-11568 logs minio-service-8967c7f7f-sds8k -c minio ++ mktemp + local LAST_OUT=/tmp/tmp.1jueKwa0db ++ mktemp + local LAST_ERR=/tmp/tmp.ti6VJIYFpZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl -n demand-backup-11568 logs minio-service-8967c7f7f-sds8k -c minio + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1jueKwa0db + cat /tmp/tmp.ti6VJIYFpZ + rm /tmp/tmp.1jueKwa0db /tmp/tmp.ti6VJIYFpZ + return 0 + echo logs saved in: /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt logs saved in: /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456%23 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep 
-c --fixed-strings -- YmFja3VwMTIzNDU2JTIz /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-minio-service-8967c7f7f-sds8k-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + echo + for p in '$pods' ++ kubectl_bin -n demand-backup-11568 get pod psmdb-client-66f577db5f-4pv85 -o 'jsonpath={.spec.containers[*].name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AJTio1w56m +++ mktemp ++ local LAST_ERR=/tmp/tmp.hkgZmnKh18 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl -n demand-backup-11568 get pod psmdb-client-66f577db5f-4pv85 -o 'jsonpath={.spec.containers[*].name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AJTio1w56m ++ cat /tmp/tmp.hkgZmnKh18 ++ rm /tmp/tmp.AJTio1w56m /tmp/tmp.hkgZmnKh18 ++ return 0 + containers=psmdb-client + for c in '$containers' + [[ psmdb-client =~ pmm ]] + kubectl_bin -n demand-backup-11568 logs psmdb-client-66f577db5f-4pv85 -c psmdb-client ++ mktemp + local LAST_OUT=/tmp/tmp.9FwOMica2C ++ mktemp + local LAST_ERR=/tmp/tmp.KBBf5GkD1n + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl -n demand-backup-11568 logs 
psmdb-client-66f577db5f-4pv85 -c psmdb-client + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9FwOMica2C + cat /tmp/tmp.KBBf5GkD1n + rm /tmp/tmp.9FwOMica2C /tmp/tmp.KBBf5GkD1n + return 0 + echo logs saved in: /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt logs saved in: /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456%23 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2JTIz /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for 
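
# [editor's note] Each grep above counts fixed-string hits for one candidate
# password in a saved container log; the bare ":" in the trace is the fallback
# that keeps `set -e` from aborting when grep exits 1 on zero matches. The loop
# reduced to its essentials ($passwords and $log_file as collected above; the
# failure branch is never taken in this run, so its wording is an assumption):
for pass in $passwords; do
    count=$(grep -c --fixed-strings -- "$pass" "$log_file" || :)
    if [[ ${count:-0} != 0 ]]; then
        echo "secret leaked in $log_file" >&2
        exit 1
    fi
done
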
pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-psmdb-client-66f577db5f-4pv85-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + echo + '[' -n psmdb-operator ']' ++ awk -F / '{print $2}' ++ kubectl_bin -n psmdb-operator get pods -o name +++ mktemp ++ local LAST_OUT=/tmp/tmp.O4XLK2g8AJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.rdUyoz7gQC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl -n psmdb-operator get pods -o name ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.O4XLK2g8AJ ++ cat /tmp/tmp.rdUyoz7gQC ++ rm /tmp/tmp.O4XLK2g8AJ /tmp/tmp.rdUyoz7gQC ++ return 0 + pods=percona-server-mongodb-operator-55966bfd7b-f2rzs + collect_logs psmdb-operator + local containers + local count + NS=psmdb-operator + for p in '$pods' ++ kubectl_bin -n psmdb-operator get pod percona-server-mongodb-operator-55966bfd7b-f2rzs -o 'jsonpath={.spec.containers[*].name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.La311osUfj +++ mktemp ++ local LAST_ERR=/tmp/tmp.BNQjKAE1b7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl -n psmdb-operator get pod 
percona-server-mongodb-operator-55966bfd7b-f2rzs -o 'jsonpath={.spec.containers[*].name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.La311osUfj ++ cat /tmp/tmp.BNQjKAE1b7 ++ rm /tmp/tmp.La311osUfj /tmp/tmp.BNQjKAE1b7 ++ return 0 + containers=percona-server-mongodb-operator + for c in '$containers' + [[ percona-server-mongodb-operator =~ pmm ]] + kubectl_bin -n psmdb-operator logs percona-server-mongodb-operator-55966bfd7b-f2rzs -c percona-server-mongodb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.EkcbXkuad3 ++ mktemp + local LAST_ERR=/tmp/tmp.PniJWEmKk0 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl -n psmdb-operator logs percona-server-mongodb-operator-55966bfd7b-f2rzs -c percona-server-mongodb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EkcbXkuad3 + cat /tmp/tmp.PniJWEmKk0 + rm /tmp/tmp.EkcbXkuad3 /tmp/tmp.PniJWEmKk0 + return 0 + echo logs saved in: /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt logs saved in: /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456%23 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# 
/tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2JTIz /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] 
+ for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.hzgVPPt15y/logs_output-percona-server-mongodb-operator-55966bfd7b-f2rzs-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + echo + destroy demand-backup-11568 + local namespace=demand-backup-11568 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.DFSn02apnU ++ mktemp + local LAST_ERR=/tmp/tmp.ni7gBBmCDa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DFSn02apnU customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.ni7gBBmCDa + rm /tmp/tmp.DFSn02apnU /tmp/tmp.ni7gBBmCDa + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.i9G1d59QUx ++ mktemp + local LAST_ERR=/tmp/tmp.ooHInfr4bN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd 
perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.i9G1d59QUx + cat /tmp/tmp.ooHInfr4bN + rm /tmp/tmp.i9G1d59QUx /tmp/tmp.ooHInfr4bN + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.eTNA89LWKZ ++ mktemp + local LAST_ERR=/tmp/tmp.ntbsIsKTEI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eTNA89LWKZ + cat /tmp/tmp.ntbsIsKTEI + rm /tmp/tmp.eTNA89LWKZ /tmp/tmp.ntbsIsKTEI + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.h5VUiaxe3R ++ mktemp + local LAST_ERR=/tmp/tmp.KPUJ7ZHPNo + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.h5VUiaxe3R + cat /tmp/tmp.KPUJ7ZHPNo + rm /tmp/tmp.h5VUiaxe3R /tmp/tmp.KPUJ7ZHPNo + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.G6gBADcSFK ++ mktemp + local LAST_ERR=/tmp/tmp.UsgNv9ZVS2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1935/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.G6gBADcSFK clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.UsgNv9ZVS2 + rm /tmp/tmp.G6gBADcSFK /tmp/tmp.UsgNv9ZVS2 + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.umsGyQrMxO ++ mktemp + local LAST_ERR=/tmp/tmp.xuhf6KKlbx + local exit_status=0 + local 
timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.umsGyQrMxO + cat /tmp/tmp.xuhf6KKlbx Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server 
(NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": 
deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found
+ sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.umsGyQrMxO + cat /tmp/tmp.xuhf6KKlbx
+ sleep 4 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.16.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.umsGyQrMxO + cat /tmp/tmp.xuhf6KKlbx
+ sleep 8 + cat /tmp/tmp.umsGyQrMxO + cat /tmp/tmp.xuhf6KKlbx
+ rm /tmp/tmp.umsGyQrMxO /tmp/tmp.xuhf6KKlbx + return 1 + true + '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace demand-backup-11568 + rm -rf /tmp/tmp.hzgVPPt15y + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp ++ mktemp + desc 'test passed' + set +o xtrace + local LAST_OUT=/tmp/tmp.kbT6mgUzwj + local LAST_OUT=/tmp/tmp.G9AuRMOFH3 ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.tSYgnAXbUn + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.VkKBMKcN8Z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace demand-backup-11568