Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/logs/demand-backup.log WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 + create_infra demand-backup-26315 + local ns=demand-backup-26315 + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.30XWmVTr4T ++ mktemp + local LAST_ERR=/tmp/tmp.Ht4SCccczq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.30XWmVTr4T customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.Ht4SCccczq + rm /tmp/tmp.30XWmVTr4T /tmp/tmp.Ht4SCccczq + return 0 ++ grep -v '\-\-\-' ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/deploy/crd.yaml + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE No resources found + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: resource(s) were provided, but no name was specified + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.C0MK0H7kIk ++ mktemp + local LAST_ERR=/tmp/tmp.n0miV9nZ6M + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.C0MK0H7kIk + cat /tmp/tmp.n0miV9nZ6M + rm /tmp/tmp.C0MK0H7kIk /tmp/tmp.n0miV9nZ6M + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd 
perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.X5auiG7JqM ++ mktemp + local LAST_ERR=/tmp/tmp.MDNBqsncOe + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.X5auiG7JqM + cat /tmp/tmp.MDNBqsncOe + rm /tmp/tmp.X5auiG7JqM /tmp/tmp.MDNBqsncOe + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.u5FktNHiPP ++ mktemp + local LAST_ERR=/tmp/tmp.kLnJW5ul4K + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.u5FktNHiPP + cat /tmp/tmp.kLnJW5ul4K + rm /tmp/tmp.u5FktNHiPP /tmp/tmp.kLnJW5ul4K + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.X69jZQnSGn ++ mktemp + local LAST_ERR=/tmp/tmp.EoT8ECVrY4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.X69jZQnSGn clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.EoT8ECVrY4 + rm /tmp/tmp.X69jZQnSGn /tmp/tmp.EoT8ECVrY4 + return 0 + check_crd_for_deletion PR-1904-fdb74d1e + local git_tag=PR-1904-fdb74d1e ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1904-fdb74d1e/deploy/crd.yaml ++ /usr/bin/sed ':a;N;$!ba;s/\n/ /g' ++ yq eval .metadata.name ++ /usr/bin/sed s/---//g + for crd_name in '$(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '\''.metadata.name'\'' | $sed '\''s/---//g'\'' | $sed '\'':a;N;$!ba;s/\n/ /g'\'')' ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aj8UOWNYBV +++ mktemp ++ local LAST_ERR=/tmp/tmp.WWgCCuMb1P ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.aj8UOWNYBV ++ cat /tmp/tmp.WWgCCuMb1P Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ 
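Before the CRDs themselves are removed, the delete_crd step traced above empties metadata.finalizers on any leftover backup/restore/cluster objects so that deletion cannot hang on a pending finalizer, then waits for each CRD to disappear. A minimal sketch of that pattern, assuming a hypothetical clear_finalizers helper (the framework itself pipes kubectl get through xargs, as the trace shows):

clear_finalizers() {
    # list <namespace> <name> pairs for the given kind and strip their finalizers
    local kind=$1
    kubectl get "$kind" --all-namespaces \
        -o custom-columns=NS:.metadata.namespace,NAME:.metadata.name --no-headers 2>/dev/null \
        | while read -r ns name; do
            kubectl patch "$kind" -n "$ns" "$name" --type=merge -p '{"metadata":{"finalizers":[]}}'
        done
}

for kind in perconaservermongodbbackups.psmdb.percona.com \
            perconaservermongodbrestores.psmdb.percona.com \
            perconaservermongodbs.psmdb.percona.com; do
    clear_finalizers "$kind"
    kubectl wait --for=delete crd "$kind" || true
done
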
exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.aj8UOWNYBV ++ cat /tmp/tmp.WWgCCuMb1P Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.aj8UOWNYBV ++ cat /tmp/tmp.WWgCCuMb1P Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.aj8UOWNYBV ++ cat /tmp/tmp.WWgCCuMb1P Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.aj8UOWNYBV /tmp/tmp.WWgCCuMb1P ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get MutatingWebhookConfiguration + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl api-resources ++ grep chaos-mesh ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + set +o xtrace ++ mktemp ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found + local LAST_OUT=/tmp/tmp.eXDnwxaRmO ++ mktemp ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.9izQEJIFAO + awk '{print$1}' ++ mktemp + local LAST_ERR=/tmp/tmp.tQDre0Z1FD + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.bHEhZanf4o + local exit_status=0 + local timeout=4 ++ seq 0 
2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace psmdb-operator --ignore-not-found ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eXDnwxaRmO + cat /tmp/tmp.tQDre0Z1FD + rm /tmp/tmp.eXDnwxaRmO /tmp/tmp.tQDre0Z1FD + return 0 namespace "demand-backup-26832" deleted namespace "gke-managed-cim" deleted namespace "gke-managed-system" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9izQEJIFAO namespace "psmdb-operator" deleted + cat /tmp/tmp.bHEhZanf4o + rm /tmp/tmp.9izQEJIFAO /tmp/tmp.bHEhZanf4o + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.51RwbMEdh9 ++ mktemp + local LAST_ERR=/tmp/tmp.uISmDPC4ea + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.51RwbMEdh9 + cat /tmp/tmp.uISmDPC4ea + rm /tmp/tmp.51RwbMEdh9 /tmp/tmp.uISmDPC4ea + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.yPybpco3nM ++ mktemp + local LAST_ERR=/tmp/tmp.GFSbb83p9n + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yPybpco3nM namespace/psmdb-operator created + cat /tmp/tmp.GFSbb83p9n + rm /tmp/tmp.yPybpco3nM /tmp/tmp.GFSbb83p9n + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.401ICqsNUl +++ mktemp ++ local LAST_ERR=/tmp/tmp.pPB9tIvEtR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.401ICqsNUl ++ cat /tmp/tmp.pPB9tIvEtR ++ rm /tmp/tmp.401ICqsNUl /tmp/tmp.pPB9tIvEtR ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1904-fdb74d1e-9-cluster10 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.Xzr9q5kyx2 ++ mktemp + local LAST_ERR=/tmp/tmp.va6H6GVRZc + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1904-fdb74d1e-9-cluster10 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Xzr9q5kyx2 Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1904-fdb74d1e-9-cluster10" modified. 
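Almost every command in this log goes through the framework's kubectl_bin wrapper: stdout and stderr are captured into mktemp files, the kubectl call is retried up to three times with a growing pause, and the captured output is printed and cleaned up before returning. A minimal sketch reconstructed from the trace (variable names follow the trace; the real helper in e2e-tests/functions may differ):

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 timeout=4
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" -eq 0 ]; then
            break
        fi
        sleep $((timeout * i))     # 0s, 4s, 8s between attempts, as seen in the trace
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}
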
+ cat /tmp/tmp.va6H6GVRZc + rm /tmp/tmp.Xzr9q5kyx2 /tmp/tmp.va6H6GVRZc + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.JNXIiWe1Ca ++ mktemp + local LAST_ERR=/tmp/tmp.z7gGYRsDam + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JNXIiWe1Ca customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.z7gGYRsDam + rm /tmp/tmp.JNXIiWe1Ca /tmp/tmp.z7gGYRsDam + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/deploy/cw-rbac.yaml + kubectl_bin apply -n psmdb-operator -f - + sed -e 's^namespace: .*^namespace: psmdb-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.t4CJXrsW3x ++ mktemp + local LAST_ERR=/tmp/tmp.BB5zeViy0d + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.t4CJXrsW3x clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.BB5zeViy0d + rm /tmp/tmp.t4CJXrsW3x /tmp/tmp.BB5zeViy0d + return 0 + kubectl_bin apply -f - + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1904-fdb74d1e") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/deploy/cw-operator.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.GNEX6mpLh8 ++ mktemp + local LAST_ERR=/tmp/tmp.GXVE1V3U9I + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GNEX6mpLh8 deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.GXVE1V3U9I + rm /tmp/tmp.GNEX6mpLh8 /tmp/tmp.GXVE1V3U9I + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.5b7Iho12aJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.6xyzXkGXRD ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5b7Iho12aJ ++ cat /tmp/tmp.6xyzXkGXRD ++ rm /tmp/tmp.5b7Iho12aJ /tmp/tmp.6xyzXkGXRD ++ return 0 + wait_pod percona-server-mongodb-operator-c4f9b4485-2qnm8 + local pod=percona-server-mongodb-operator-c4f9b4485-2qnm8 + set +o xtrace waiting for pod/percona-server-mongodb-operator-c4f9b4485-2qnm8 to be ready.OK + create_namespace demand-backup-26315 + local namespace=demand-backup-26315 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ awk '-F ' '{print $2}' ++ tail -n1 ++ sed s/NAMESPACE// ++ helm list --all-namespaces --filter chaos-mesh + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ grep chaos-mesh.org ++ kubectl get crd ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get clusterrole ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces demand-backup-26315' + set +o xtrace 
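The deploy_operator step traced above applies deploy/crd.yaml server-side, installs the cluster-wide RBAC, and pipes deploy/cw-operator.yaml through yq to pin the PR image and set two env vars before applying it. Condensed from the trace (paths shortened; treat this as a sketch, not the framework's exact code):

kubectl apply --server-side --force-conflicts -f deploy/crd.yaml
sed -e 's^namespace: .*^namespace: psmdb-operator^' deploy/cw-rbac.yaml | kubectl apply -n psmdb-operator -f -

yq eval '
  (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1904-fdb74d1e")
  | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value = "true")
  | ((.. | select(.[] == "LOG_LEVEL")) |= .value = "DEBUG")
' deploy/cw-operator.yaml | kubectl apply -f -   # context namespace is already psmdb-operator here
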
----------------------------------------------------------------------------------- cleaned up old namespaces demand-backup-26315 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace demand-backup-26315 --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.tOEyuOJFTP + kubectl_bin get ns ++ mktemp + local LAST_ERR=/tmp/tmp.qGWpGmrR6Q + local exit_status=0 + local timeout=4 ++ mktemp + local LAST_OUT=/tmp/tmp.20RX19lCGZ ++ seq 0 2 + awk '{print$1}' + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + for i in '$(seq 0 2)' + set +e + kubectl delete namespace demand-backup-26315 --ignore-not-found + xargs kubectl delete ns ++ mktemp + local LAST_ERR=/tmp/tmp.6VgrIo3nv9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.20RX19lCGZ + cat /tmp/tmp.6VgrIo3nv9 + rm /tmp/tmp.20RX19lCGZ /tmp/tmp.6VgrIo3nv9 + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tOEyuOJFTP + cat /tmp/tmp.qGWpGmrR6Q + rm /tmp/tmp.tOEyuOJFTP /tmp/tmp.qGWpGmrR6Q + return 0 + kubectl_bin wait --for=delete namespace demand-backup-26315 ++ mktemp + local LAST_OUT=/tmp/tmp.kwvncJ9HxM ++ mktemp + local LAST_ERR=/tmp/tmp.xKDAQomJPH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace demand-backup-26315 namespace "gke-managed-cim" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kwvncJ9HxM + cat /tmp/tmp.xKDAQomJPH + rm /tmp/tmp.kwvncJ9HxM /tmp/tmp.xKDAQomJPH + return 0 + desc 'create namespace demand-backup-26315' + set +o xtrace ----------------------------------------------------------------------------------- create namespace demand-backup-26315 ----------------------------------------------------------------------------------- + kubectl_bin create namespace demand-backup-26315 ++ mktemp + local LAST_OUT=/tmp/tmp.Fyjx0Pfiw0 ++ mktemp + local LAST_ERR=/tmp/tmp.mEsn9syDJe + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace demand-backup-26315 namespace "gke-managed-system" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Fyjx0Pfiw0 namespace/demand-backup-26315 created + cat /tmp/tmp.mEsn9syDJe + rm /tmp/tmp.Fyjx0Pfiw0 /tmp/tmp.mEsn9syDJe + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.YUPx1gXrDX +++ mktemp ++ local LAST_ERR=/tmp/tmp.adpuxsQsx0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YUPx1gXrDX ++ cat /tmp/tmp.adpuxsQsx0 ++ rm /tmp/tmp.YUPx1gXrDX /tmp/tmp.adpuxsQsx0 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1904-fdb74d1e-9-cluster10 --namespace=demand-backup-26315 ++ mktemp + local LAST_OUT=/tmp/tmp.5SSKwvADXC ++ mktemp + local LAST_ERR=/tmp/tmp.2uSnuedyPm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1904-fdb74d1e-9-cluster10 --namespace=demand-backup-26315 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5SSKwvADXC Context 
"gke_cloud-dev-112233_us-central1-a_jen-psmdb-1904-fdb74d1e-9-cluster10" modified. + cat /tmp/tmp.2uSnuedyPm + rm /tmp/tmp.5SSKwvADXC /tmp/tmp.2uSnuedyPm + return 0 + deploy_minio + desc 'install Minio' + set +o xtrace ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- + helm uninstall minio-service Error: uninstall: Release not loaded: minio-service: release: not found + : + helm repo remove minio "minio" has been removed from your repositories + helm repo add minio https://charts.min.io/ "minio" has been added to your repositories + retry 10 60 helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio + local max=10 + local delay=60 + shift 2 + local n=1 + helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio NAME: minio-service LAST DEPLOYED: Mon May 12 12:24:57 2025 NAMESPACE: demand-backup-26315 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.demand-backup-26315.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace demand-backup-26315 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. kubectl port-forward $POD_NAME 9000 --namespace demand-backup-26315 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace demand-backup-26315 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace demand-backup-26315 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. 
mc ls minio-service-local ++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eiJldC48hx +++ mktemp ++ local LAST_ERR=/tmp/tmp.wKua1g6wgL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eiJldC48hx ++ cat /tmp/tmp.wKua1g6wgL ++ rm /tmp/tmp.eiJldC48hx /tmp/tmp.wKua1g6wgL ++ return 0 + MINIO_POD=minio-service-86dfccd949-klrtf + wait_pod minio-service-86dfccd949-klrtf + local pod=minio-service-86dfccd949-klrtf + set +o xtrace waiting for pod/minio-service-86dfccd949-klrtf to be ready.OK + '[' -n psmdb-operator ']' + kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.demand-backup-26315.svc.cluster.local --tcp=9000 ++ mktemp + local LAST_OUT=/tmp/tmp.YYwNfC1rri ++ mktemp + local LAST_ERR=/tmp/tmp.TpIt1Az6Gg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create svc -n psmdb-operator externalname minio-service --external-name=minio-service.demand-backup-26315.svc.cluster.local --tcp=9000 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YYwNfC1rri service/minio-service created + cat /tmp/tmp.TpIt1Az6Gg + rm /tmp/tmp.YYwNfC1rri /tmp/tmp.TpIt1Az6Gg + return 0 + create_minio_bucket operator-testing + local bucket=operator-testing + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' ++ mktemp + local LAST_OUT=/tmp/tmp.Fff2OBrPRy ++ mktemp + local LAST_ERR=/tmp/tmp.heqxnFr1me + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Fff2OBrPRy make_bucket: operator-testing pod "aws-cli" deleted + cat /tmp/tmp.heqxnFr1me If you don't see a command prompt, try pressing enter. 
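The helm install in the MinIO step above is wrapped in "retry 10 60 ...", i.e. up to 10 attempts with a 60-second pause between them. The trace only shows the happy path (max, delay, shift 2, n=1), so the helper below is a plausible reconstruction rather than the framework's exact code:

retry() {
    local max=$1
    local delay=$2
    shift 2                      # everything that remains is the command to run
    local n=1
    until "$@"; do
        if [ "$n" -ge "$max" ]; then
            echo "retry: '$*' still failing after $n attempts" >&2
            return 1
        fi
        sleep "$delay"
        n=$((n + 1))
    done
}

# usage, as in the log:
# retry 10 60 helm install minio-service --version 5.4.0 ... minio/minio
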
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-26315 + rm /tmp/tmp.Fff2OBrPRy /tmp/tmp.heqxnFr1me + return 0 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + cluster=some-name-rs0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.OzDbSop8uT ++ mktemp + local LAST_ERR=/tmp/tmp.dXqDPpKq94 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.OzDbSop8uT secret/some-users created deployment.apps/psmdb-client created + cat /tmp/tmp.dXqDPpKq94 + rm /tmp/tmp.OzDbSop8uT /tmp/tmp.dXqDPpKq94 + return 0 + apply_s3_storage_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.JhwrJ5plup ++ mktemp + local LAST_ERR=/tmp/tmp.N8ksz2h3wh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JhwrJ5plup secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.N8ksz2h3wh + rm /tmp/tmp.JhwrJ5plup /tmp/tmp.N8ksz2h3wh + return 0 + desc 'create first PSMDB cluster some-name-rs0' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster some-name-rs0 ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/some-name-rs0.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/some-name-rs0.yml + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/some-name-rs0.yml + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '.spec.upgradeOptions.apply="Never"' + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1904-fdb74d1e"' + kubectl_bin apply -f - + yq eval '(.spec | select(.image == null)).image = 
"perconalab/percona-server-mongodb-operator:main-mongod7.0"' ++ mktemp + local LAST_OUT=/tmp/tmp.pT6P54vtvH ++ mktemp + local LAST_ERR=/tmp/tmp.w71DawETAP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.pT6P54vtvH perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.w71DawETAP + rm /tmp/tmp.pT6P54vtvH /tmp/tmp.w71DawETAP + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready........OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.........OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vuVBvCWk8t +++ mktemp ++ local LAST_ERR=/tmp/tmp.oZXS2HbjZH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vuVBvCWk8t ++ cat /tmp/tmp.oZXS2HbjZH ++ rm /tmp/tmp.vuVBvCWk8t /tmp/tmp.oZXS2HbjZH ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready........OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tNkwrrUP0C +++ mktemp ++ local LAST_ERR=/tmp/tmp.qcXgjsSoJe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.tNkwrrUP0C ++ cat /tmp/tmp.qcXgjsSoJe ++ rm /tmp/tmp.tNkwrrUP0C /tmp/tmp.qcXgjsSoJe ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + desc 'check if service and statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/some-name-rs0 + local resource=statefulset/some-name-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/statefulset_some-name-rs0.yml + local new_result=/tmp/tmp.78sjnJyyHF/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/statefulset_some-name-rs0-oc.yml ']' + kubectl_bin get -o yaml 
statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion)
  | del(.metadata.managedFields)
  | del(.. | select(has("creationTimestamp")).creationTimestamp)
  | del(.. | select(has("namespace")).namespace)
  | del(.. | select(has("uid")).uid)
  | del(.metadata.resourceVersion)
  | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE"))
  | del(.metadata.selfLink)
  | del(.metadata.annotations."cloud.google.com/neg")
  | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration")
  | del(.. | select(has("image")).image)
  | del(.. | select(has("clusterIP")).clusterIP)
  | del(.. | select(has("clusterIPs")).clusterIPs)
  | del(.. | select(has("dataSource")).dataSource)
  | del(.. | select(has("procMount")).procMount)
  | del(.. | select(has("storageClassName")).storageClassName)
  | del(.. | select(has("finalizers")).finalizers)
  | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection")
  | del(.. | select(has("volumeName")).volumeName)
  | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner")
  | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner")
  | del(.spec.volumeMode)
  | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node")
  | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash")
  | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash")
  | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash")
  | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash")
  | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem"))
  | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort)
  | del(.. | select(has("nodePort")).nodePort)
  | del(.status)
  | (.. | select(tag == "!!str")) |= sub("demand-backup-26315", "NAME_SPACE")
  | del(.spec.volumeClaimTemplates[].apiVersion)
  | del(.spec.volumeClaimTemplates[].kind)
  | del(.spec.ipFamilies)
  | del(.spec.ipFamilyPolicy)
  | (.. | select(. == "extensions/v1beta1")) = "apps/v1"
  | (.. | select(.
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.uRfjImln0K ++ mktemp + local LAST_ERR=/tmp/tmp.pa1x1VqPl1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.uRfjImln0K + cat /tmp/tmp.pa1x1VqPl1 + rm /tmp/tmp.uRfjImln0K /tmp/tmp.pa1x1VqPl1 + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.78sjnJyyHF/statefulset_some-name-rs0.yml + version_gt 1.22 ++ bc -l ++ echo '1.30 >= 1.22' + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.78sjnJyyHF/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.78sjnJyyHF/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/statefulset_some-name-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/statefulset_some-name-rs0.yml /tmp/tmp.78sjnJyyHF/statefulset_some-name-rs0.yml + desc 'create user' + set +o xtrace ----------------------------------------------------------------------------------- create user ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9OJRyHTvl9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.JnKmroE5SV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9OJRyHTvl9 ++ cat /tmp/tmp.JnKmroE5SV ++ rm /tmp/tmp.9OJRyHTvl9 /tmp/tmp.JnKmroE5SV ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.ytB0iUu6VH ++ mktemp + local LAST_ERR=/tmp/tmp.9h1aZtdFJF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ytB0iUu6VH Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("55752b27-78e9-4855-9abe-6c76636444f9") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.9h1aZtdFJF + rm /tmp/tmp.ytB0iUu6VH /tmp/tmp.9h1aZtdFJF + return 0 + sleep 2 + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jHLPtXzmyM +++ mktemp ++ local LAST_ERR=/tmp/tmp.yCMDAyygpK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.jHLPtXzmyM ++ cat /tmp/tmp.yCMDAyygpK ++ rm /tmp/tmp.jHLPtXzmyM /tmp/tmp.yCMDAyygpK ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.NUvCqtFHEF ++ mktemp + local LAST_ERR=/tmp/tmp.yPWrpyVZgc + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.NUvCqtFHEF Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("fcb22db5-a1be-4c78-8af0-245a2ddfb0aa") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.yPWrpyVZgc + rm /tmp/tmp.NUvCqtFHEF /tmp/tmp.yPWrpyVZgc + return 0 + run_mongo 'use myApp\n db.test2.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.test2.insert({ x: 100501 })' + local 
uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pOw9rmu3Bz +++ mktemp ++ local LAST_ERR=/tmp/tmp.2NPOcwPJ0s ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pOw9rmu3Bz ++ cat /tmp/tmp.2NPOcwPJ0s ++ rm /tmp/tmp.pOw9rmu3Bz /tmp/tmp.2NPOcwPJ0s ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.qvL5Myb9uX ++ mktemp + local LAST_ERR=/tmp/tmp.FWZXyveBIx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qvL5Myb9uX Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("8981398b-912a-4584-b57a-533b533abdaa") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.FWZXyveBIx + rm /tmp/tmp.qvL5Myb9uX /tmp/tmp.FWZXyveBIx + return 0 + run_mongo 'use myApp\n db.test3.insert({ x: 100502 })' myApp:myPass@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.test3.insert({ x: 100502 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.roU3Hwlqwt +++ mktemp ++ local LAST_ERR=/tmp/tmp.igdxLUM9fy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.roU3Hwlqwt ++ cat /tmp/tmp.igdxLUM9fy ++ rm /tmp/tmp.roU3Hwlqwt /tmp/tmp.igdxLUM9fy ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test3.insert({ x: 100502 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.eBpJ0aMmpk ++ mktemp + local 
LAST_ERR=/tmp/tmp.5870xyT4bY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test3.insert({ x: 100502 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eBpJ0aMmpk Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("ab010a19-967b-4135-b8fa-fc745086875b") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.5870xyT4bY + rm /tmp/tmp.eBpJ0aMmpk /tmp/tmp.5870xyT4bY + return 0 + custom_user_name=test1user + custom_role_name=test1role + run_mongo 'use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})' userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OIfLV1QSaT +++ mktemp ++ local LAST_ERR=/tmp/tmp.HxBiEHRqvv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.OIfLV1QSaT ++ cat /tmp/tmp.HxBiEHRqvv ++ rm /tmp/tmp.OIfLV1QSaT /tmp/tmp.HxBiEHRqvv ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.4yDJYYtYRq ++ mktemp + local LAST_ERR=/tmp/tmp.80PRofMBnU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4yDJYYtYRq Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : 
UUID("1ca0226c-648a-42ef-8cf4-ec8b17f6e9b6") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp Successfully added user: { "user" : "test1user", "roles" : [ ] } bye + cat /tmp/tmp.80PRofMBnU + rm /tmp/tmp.4yDJYYtYRq /tmp/tmp.80PRofMBnU + return 0 + run_mongo 'use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})' userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DrJS7jPVbB +++ mktemp ++ local LAST_ERR=/tmp/tmp.vQAHJzr7WW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DrJS7jPVbB ++ cat /tmp/tmp.vQAHJzr7WW ++ rm /tmp/tmp.DrJS7jPVbB /tmp/tmp.vQAHJzr7WW ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.5TNltSuSWQ ++ mktemp + local LAST_ERR=/tmp/tmp.ntQ5B1f7yW + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5TNltSuSWQ Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("02d76b4b-5d00-4699-8c44-545016c6c687") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp { "role" : "test1role", "privileges" : [ ], "roles" : [ ] } bye + cat /tmp/tmp.ntQ5B1f7yW + rm /tmp/tmp.5TNltSuSWQ /tmp/tmp.ntQ5B1f7yW + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:27:36+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Byki3dASd6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Idb8vjPzMQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Byki3dASd6 ++ cat /tmp/tmp.Idb8vjPzMQ ++ rm /tmp/tmp.Byki3dASd6 /tmp/tmp.Idb8vjPzMQ ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.qamOQUh2zw ++ mktemp + local LAST_ERR=/tmp/tmp.JgbuO5zawn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qamOQUh2zw + cat /tmp/tmp.JgbuO5zawn + rm /tmp/tmp.qamOQUh2zw /tmp/tmp.JgbuO5zawn + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:27:40+0000] running db.test.find() in myApp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nP0WW031Vn +++ mktemp ++ local LAST_ERR=/tmp/tmp.RkRGqTkw9Q ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nP0WW031Vn ++ cat /tmp/tmp.RkRGqTkw9Q ++ rm /tmp/tmp.nP0WW031Vn /tmp/tmp.RkRGqTkw9Q ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.aOP6PEyIiO ++ mktemp + local LAST_ERR=/tmp/tmp.GJToQxqexj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.aOP6PEyIiO + cat /tmp/tmp.GJToQxqexj + rm /tmp/tmp.aOP6PEyIiO /tmp/tmp.GJToQxqexj + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:27:44+0000] running db.test.find() in myApp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BQekQm0dLp +++ mktemp ++ local LAST_ERR=/tmp/tmp.WoUejcUa9Y ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BQekQm0dLp ++ cat /tmp/tmp.WoUejcUa9Y ++ rm /tmp/tmp.BQekQm0dLp /tmp/tmp.WoUejcUa9Y ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.oV86iVkpYf ++ mktemp + local LAST_ERR=/tmp/tmp.y2qwmOY38j + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oV86iVkpYf + cat /tmp/tmp.y2qwmOY38j + rm /tmp/tmp.oV86iVkpYf /tmp/tmp.y2qwmOY38j + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + wait_backup_agent some-name-rs0-0 + local agent_pod=some-name-rs0-0 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-0...2025-05-12T12:27:06.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-1 + local agent_pod=some-name-rs0-1 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-1...2025-05-12T12:27:08.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-2 + local agent_pod=some-name-rs0-2 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-2...2025-05-12T12:27:11.000+0000 I listening for the commands + backup_name_minio=backup-minio + '[' -z '' ']' + backup_name_aws=backup-aws-s3 + backup_name_gcp=backup-gcp-cs + backup_name_azure=backup-azure-blob + desc 'run backups' + set +o xtrace ----------------------------------------------------------------------------------- run backups ----------------------------------------------------------------------------------- + '[' -z '' ']' + run_backup aws-s3 + local storage=aws-s3 + local backup_name=backup-aws-s3 + local type=logical + desc 'run backup backup-aws-s3' + set +o xtrace 
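Each of the four run_backup calls in this stretch (aws-s3, gcp-cs, azure-blob, minio) only swaps the storage name into the per-storage template before applying it. Condensed into a sketch (the loop and the $SRC variable are illustrative stand-ins, $SRC abbreviating the /mnt/jenkins/workspace/... checkout path seen throughout this log; they are not part of the test script itself):

  # Illustrative condensation of run_backup <storage>.
  # $SRC is an assumed placeholder for the repository checkout path.
  for storage in aws-s3 gcp-cs azure-blob minio; do
      yq eval ".metadata.name = \"backup-${storage}\" | .spec.storageName = \"${storage}\" | .spec.type = \"logical\"" \
          "$SRC/e2e-tests/demand-backup/conf/backup-${storage}.yml" \
      | kubectl apply -f -
  done

The operator then reports each perconaservermongodbbackup as created, and the wait_backup calls a little further down simply poll each one until it reaches the ready state.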
----------------------------------------------------------------------------------- run backup backup-aws-s3 ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-aws-s3" | .spec.storageName = "aws-s3" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/backup-aws-s3.yml ++ mktemp + local LAST_OUT=/tmp/tmp.vsLC5esvxU ++ mktemp + local LAST_ERR=/tmp/tmp.wulLbMS78w + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vsLC5esvxU perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created + cat /tmp/tmp.wulLbMS78w + rm /tmp/tmp.vsLC5esvxU /tmp/tmp.wulLbMS78w + return 0 + run_backup gcp-cs + local storage=gcp-cs + local backup_name=backup-gcp-cs + local type=logical + desc 'run backup backup-gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-gcp-cs ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-gcp-cs" | .spec.storageName = "gcp-cs" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/backup-gcp-cs.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.wil6SASktr ++ mktemp + local LAST_ERR=/tmp/tmp.kUUKqu31qx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wil6SASktr perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs created + cat /tmp/tmp.kUUKqu31qx + rm /tmp/tmp.wil6SASktr /tmp/tmp.kUUKqu31qx + return 0 + run_backup azure-blob + local storage=azure-blob + local backup_name=backup-azure-blob + local type=logical + desc 'run backup backup-azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-azure-blob ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-azure-blob" | .spec.storageName = "azure-blob" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/backup-azure-blob.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.eOhsYgOziV ++ mktemp + local LAST_ERR=/tmp/tmp.CGfSNfOBNO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eOhsYgOziV perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created + cat /tmp/tmp.CGfSNfOBNO + rm /tmp/tmp.eOhsYgOziV /tmp/tmp.CGfSNfOBNO + return 0 + run_backup minio + local storage=minio + local backup_name=backup-minio + local type=logical + desc 'run backup backup-minio' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-minio" | .spec.storageName = "minio" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/backup-minio.yml ++ mktemp + local LAST_OUT=/tmp/tmp.J44e2nFSFj ++ mktemp + local LAST_ERR=/tmp/tmp.HUui0ZTxuV + local 
exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.J44e2nFSFj perconaservermongodbbackup.psmdb.percona.com/backup-minio created + cat /tmp/tmp.HUui0ZTxuV + rm /tmp/tmp.J44e2nFSFj /tmp/tmp.HUui0ZTxuV + return 0 + '[' -z '' ']' + wait_backup backup-aws-s3 + local backup_name=backup-aws-s3 + local target_state=ready + set +o xtrace waiting for backup-aws-s3 to reach ready state. + wait_backup backup-gcp-cs + local backup_name=backup-gcp-cs + local target_state=ready + set +o xtrace waiting for backup-gcp-cs to reach ready state............. + wait_backup backup-azure-blob + local backup_name=backup-azure-blob + local target_state=ready + set +o xtrace waiting for backup-azure-blob to reach ready state. + wait_backup backup-minio + local backup_name=backup-minio + local target_state=ready + set +o xtrace waiting for backup-minio to reach ready state. + sleep 5 + '[' -z '' ']' + desc 'check backup and restore -- aws-s3' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- aws-s3 ----------------------------------------------------------------------------------- ++ get_backup_dest backup-aws-s3 ++ local backup_name=backup-aws-s3 ++ kubectl_bin get psmdb-backup backup-aws-s3 -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ sed 's|azure://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.emTBf4QEbL +++ mktemp ++ local LAST_ERR=/tmp/tmp.YtxormVyPk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-aws-s3 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.emTBf4QEbL ++ cat /tmp/tmp.YtxormVyPk ++ rm /tmp/tmp.emTBf4QEbL /tmp/tmp.YtxormVyPk ++ return 0 + backup_dest_aws=operator-testing/psmdb-demand-backup/2025-05-12T12:27:57Z + gunzip + curl -s https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-12T12:27:57Z/rs0/myApp.test.gz + run_recovery_check backup-aws-s3 some-name-rs0 + local backup=backup-aws-s3 + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cltn074kef +++ mktemp ++ local LAST_ERR=/tmp/tmp.qk56O5UiG3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cltn074kef ++ cat /tmp/tmp.qk56O5UiG3 ++ rm /tmp/tmp.cltn074kef /tmp/tmp.qk56O5UiG3 ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.53AUzQV1Gv ++ mktemp + local LAST_ERR=/tmp/tmp.dWWDSNwj3a + local 
exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.53AUzQV1Gv Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("8dd82bfc-b97b-4d08-8a26-1817b0f70d9d") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.dWWDSNwj3a + rm /tmp/tmp.53AUzQV1Gv /tmp/tmp.dWWDSNwj3a + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-26315 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:28:56+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-26315 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cN3b7VkaLi +++ mktemp ++ local LAST_ERR=/tmp/tmp.gVxam1UD7l ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cN3b7VkaLi ++ cat /tmp/tmp.gVxam1UD7l ++ rm /tmp/tmp.cN3b7VkaLi /tmp/tmp.gVxam1UD7l ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.gAzTpeICcU ++ mktemp + local LAST_ERR=/tmp/tmp.WTAWzn2nDp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gAzTpeICcU + cat 
/tmp/tmp.WTAWzn2nDp + rm /tmp/tmp.gAzTpeICcU /tmp/tmp.WTAWzn2nDp + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.78sjnJyyHF/find-2nd + run_restore backup-aws-s3 + local backup_name=backup-aws-s3 + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/restore.yml + /usr/bin/sed -e 's/name:/name: restore-backup-aws-s3/' + /usr/bin/sed -e 's/backupName:/backupName: backup-aws-s3/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.x3tEF6SgxW ++ mktemp + local LAST_ERR=/tmp/tmp.3p7sP3s5cB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.x3tEF6SgxW perconaservermongodbrestore.psmdb.percona.com/restore-backup-aws-s3 created + cat /tmp/tmp.3p7sP3s5cB + rm /tmp/tmp.x3tEF6SgxW /tmp/tmp.3p7sP3s5cB + return 0 + wait_restore backup-aws-s3 some-name + local backup_name=backup-aws-s3 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-aws-s3 object to be createdOK Waiting psmdb-restore/restore-backup-aws-s3 to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XOsk07ToxZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.AYCYlJTAPv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XOsk07ToxZ ++ cat /tmp/tmp.AYCYlJTAPv ++ rm /tmp/tmp.XOsk07ToxZ /tmp/tmp.AYCYlJTAPv ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:29:21+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hF2XiikaKt +++ mktemp ++ local LAST_ERR=/tmp/tmp.4y3Xz5tNKs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hF2XiikaKt ++ cat /tmp/tmp.4y3Xz5tNKs ++ rm /tmp/tmp.hF2XiikaKt /tmp/tmp.4y3Xz5tNKs ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.aR12sU32ky ++ mktemp + local LAST_ERR=/tmp/tmp.yizZwrSYIn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.aR12sU32ky + cat /tmp/tmp.yizZwrSYIn + rm /tmp/tmp.aR12sU32ky /tmp/tmp.yizZwrSYIn + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:29:23+0000] running db.test.find() in myApp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JlJYbs2DhW +++ mktemp ++ local LAST_ERR=/tmp/tmp.du273Zfga6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JlJYbs2DhW ++ cat /tmp/tmp.du273Zfga6 ++ rm /tmp/tmp.JlJYbs2DhW /tmp/tmp.du273Zfga6 ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.mpjYxG1ule ++ mktemp + local LAST_ERR=/tmp/tmp.mWWINTEEwV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mpjYxG1ule + cat /tmp/tmp.mWWINTEEwV + rm /tmp/tmp.mpjYxG1ule /tmp/tmp.mWWINTEEwV + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:29:26+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JH5yVMHkSC +++ mktemp ++ local LAST_ERR=/tmp/tmp.kbOdMxCaPK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JH5yVMHkSC ++ cat /tmp/tmp.kbOdMxCaPK ++ rm /tmp/tmp.JH5yVMHkSC /tmp/tmp.kbOdMxCaPK ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.PzuoJpvoZu ++ mktemp + local LAST_ERR=/tmp/tmp.y1xR2uRj4m + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PzuoJpvoZu + cat /tmp/tmp.y1xR2uRj4m + rm /tmp/tmp.PzuoJpvoZu /tmp/tmp.y1xR2uRj4m + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + desc 'check backup and restore -- gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- gcp-cs ----------------------------------------------------------------------------------- ++ get_backup_dest backup-gcp-cs ++ local backup_name=backup-gcp-cs ++ sed 's|s3://||' ++ sed 's|azure://||' ++ sed -e 's/.json$//' ++ kubectl_bin get psmdb-backup backup-gcp-cs -o 'jsonpath={.status.destination}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bheppb0PiX +++ mktemp ++ local LAST_ERR=/tmp/tmp.fnA9reNQRZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-gcp-cs -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bheppb0PiX ++ cat /tmp/tmp.fnA9reNQRZ ++ rm /tmp/tmp.bheppb0PiX /tmp/tmp.fnA9reNQRZ ++ return 0 + backup_dest_gcp=operator-testing/psmdb-demand-backup/2025-05-12T12:28:31Z + gunzip + curl -s https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-12T12:28:31Z/rs0/myApp.test.gz + run_recovery_check backup-gcp-cs some-name-rs0 + 
local backup=backup-gcp-cs + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LL7MOijpSO +++ mktemp ++ local LAST_ERR=/tmp/tmp.HzhPOhmVIn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LL7MOijpSO ++ cat /tmp/tmp.HzhPOhmVIn ++ rm /tmp/tmp.LL7MOijpSO /tmp/tmp.HzhPOhmVIn ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.XaFQolH4lO ++ mktemp + local LAST_ERR=/tmp/tmp.ulob0bONbA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XaFQolH4lO Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("0eea6a33-2162-4637-85f7-ddb27054c270") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.ulob0bONbA + rm /tmp/tmp.XaFQolH4lO /tmp/tmp.ulob0bONbA + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-26315 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:29:35+0000] running db.test.find() in myApp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-26315 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6CvAim00h1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.kDb1e6rnuf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6CvAim00h1 ++ cat /tmp/tmp.kDb1e6rnuf ++ rm /tmp/tmp.6CvAim00h1 /tmp/tmp.kDb1e6rnuf ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.a0wxez91IW ++ mktemp + local LAST_ERR=/tmp/tmp.p3OR3JwzTQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.a0wxez91IW + cat /tmp/tmp.p3OR3JwzTQ + rm /tmp/tmp.a0wxez91IW /tmp/tmp.p3OR3JwzTQ + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.78sjnJyyHF/find-2nd + run_restore backup-gcp-cs + local backup_name=backup-gcp-cs + /usr/bin/sed -e 's/backupName:/backupName: backup-gcp-cs/' + kubectl_bin apply -f - + /usr/bin/sed -e 's/name:/name: restore-backup-gcp-cs/' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/restore.yml ++ mktemp + local LAST_OUT=/tmp/tmp.f6qhL03p7D ++ mktemp + local LAST_ERR=/tmp/tmp.mJ49IOae7o + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.f6qhL03p7D perconaservermongodbrestore.psmdb.percona.com/restore-backup-gcp-cs created + cat /tmp/tmp.mJ49IOae7o + rm /tmp/tmp.f6qhL03p7D /tmp/tmp.mJ49IOae7o + return 0 + wait_restore backup-gcp-cs some-name + local backup_name=backup-gcp-cs + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-gcp-cs object to be createdOK Waiting psmdb-restore/restore-backup-gcp-cs to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + 
retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pKNo36lOeq +++ mktemp ++ local LAST_ERR=/tmp/tmp.aKZDAZVz71 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pKNo36lOeq ++ cat /tmp/tmp.aKZDAZVz71 ++ rm /tmp/tmp.pKNo36lOeq /tmp/tmp.aKZDAZVz71 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:30:02+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1sFZeU9OWe +++ mktemp ++ local LAST_ERR=/tmp/tmp.HksguXEf0z ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1sFZeU9OWe ++ cat /tmp/tmp.HksguXEf0z ++ rm /tmp/tmp.1sFZeU9OWe /tmp/tmp.HksguXEf0z ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.U5pXDHHXLs ++ mktemp + local LAST_ERR=/tmp/tmp.dmf1MzCTWs + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.U5pXDHHXLs + cat /tmp/tmp.dmf1MzCTWs + rm /tmp/tmp.U5pXDHHXLs /tmp/tmp.dmf1MzCTWs + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + for i in '$(seq 0 $last_pod)' + 
compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:30:06+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R4lJ3cvKQe +++ mktemp ++ local LAST_ERR=/tmp/tmp.UrUn4FOwA9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.R4lJ3cvKQe ++ cat /tmp/tmp.UrUn4FOwA9 ++ rm /tmp/tmp.R4lJ3cvKQe /tmp/tmp.UrUn4FOwA9 ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.lHqiSwOlWl ++ mktemp + local LAST_ERR=/tmp/tmp.CCa2GQUc0s + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lHqiSwOlWl + cat /tmp/tmp.CCa2GQUc0s + rm /tmp/tmp.lHqiSwOlWl /tmp/tmp.CCa2GQUc0s + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:30:09+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1mjfqTsdr8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.uCvmn1tnDi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1mjfqTsdr8 ++ cat /tmp/tmp.uCvmn1tnDi ++ rm /tmp/tmp.1mjfqTsdr8 /tmp/tmp.uCvmn1tnDi ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.ft8W7q9VH3 ++ mktemp + local LAST_ERR=/tmp/tmp.kQuX5SiIfR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ft8W7q9VH3 + cat /tmp/tmp.kQuX5SiIfR + rm /tmp/tmp.ft8W7q9VH3 /tmp/tmp.kQuX5SiIfR + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + desc 'check backup and restore -- azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- azure-blob ----------------------------------------------------------------------------------- ++ get_backup_dest backup-azure-blob ++ local backup_name=backup-azure-blob ++ sed 's|azure://||' ++ sed -e 's/.json$//' ++ kubectl_bin get psmdb-backup backup-azure-blob -o 'jsonpath={.status.destination}' ++ sed 's|s3://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7yHl3Smfse +++ mktemp ++ local LAST_ERR=/tmp/tmp.YbtBGzntXE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-azure-blob -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7yHl3Smfse ++ cat /tmp/tmp.YbtBGzntXE ++ rm /tmp/tmp.7yHl3Smfse /tmp/tmp.YbtBGzntXE ++ return 0 + backup_dest_azure=https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-12T12:28:20Z + gunzip + curl -s 
https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-12T12:28:20Z/rs0/myApp.test.gz + run_recovery_check backup-azure-blob some-name-rs0 + local backup=backup-azure-blob + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Pb5w5zpWeg +++ mktemp ++ local LAST_ERR=/tmp/tmp.Zb7tkuT7WX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Pb5w5zpWeg ++ cat /tmp/tmp.Zb7tkuT7WX ++ rm /tmp/tmp.Pb5w5zpWeg /tmp/tmp.Zb7tkuT7WX ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.5RT7LsTMPN ++ mktemp + local LAST_ERR=/tmp/tmp.eWMMZjg5sT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5RT7LsTMPN Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("99a91aa7-9338-422f-a99d-27069789f63c") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.eWMMZjg5sT + rm /tmp/tmp.5RT7LsTMPN /tmp/tmp.eWMMZjg5sT + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-26315 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:30:18+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-26315 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EqxC6VYeLF +++ mktemp ++ local LAST_ERR=/tmp/tmp.uqZyrJa4gz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EqxC6VYeLF ++ cat /tmp/tmp.uqZyrJa4gz ++ rm /tmp/tmp.EqxC6VYeLF /tmp/tmp.uqZyrJa4gz ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.e66wuc9aA7 ++ mktemp + local LAST_ERR=/tmp/tmp.GzBwoh3K1O + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.e66wuc9aA7 + cat /tmp/tmp.GzBwoh3K1O + rm /tmp/tmp.e66wuc9aA7 /tmp/tmp.GzBwoh3K1O + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.78sjnJyyHF/find-2nd + run_restore backup-azure-blob + local backup_name=backup-azure-blob + kubectl_bin apply -f - + /usr/bin/sed -e 's/backupName:/backupName: backup-azure-blob/' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/restore.yml + /usr/bin/sed -e 's/name:/name: restore-backup-azure-blob/' ++ mktemp + local LAST_OUT=/tmp/tmp.sWK55GoVGo ++ mktemp + local LAST_ERR=/tmp/tmp.zcDO45ZAS7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sWK55GoVGo perconaservermongodbrestore.psmdb.percona.com/restore-backup-azure-blob created + cat /tmp/tmp.zcDO45ZAS7 + rm /tmp/tmp.sWK55GoVGo /tmp/tmp.zcDO45ZAS7 + return 0 + wait_restore backup-azure-blob some-name + local backup_name=backup-azure-blob + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-azure-blob object to be createdOK Waiting psmdb-restore/restore-backup-azure-blob to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local 
cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yyes3Kie9U +++ mktemp ++ local LAST_ERR=/tmp/tmp.2IMUGXrWPt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yyes3Kie9U ++ cat /tmp/tmp.2IMUGXrWPt ++ rm /tmp/tmp.yyes3Kie9U /tmp/tmp.2IMUGXrWPt ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:30:45+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.myYBXWw9Zm +++ mktemp ++ local LAST_ERR=/tmp/tmp.tNxPsDxlku ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.myYBXWw9Zm ++ cat /tmp/tmp.tNxPsDxlku ++ rm /tmp/tmp.myYBXWw9Zm /tmp/tmp.tNxPsDxlku ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.bL1J1tGUF9 ++ mktemp + local LAST_ERR=/tmp/tmp.8bd9zQWCwV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bL1J1tGUF9 + cat /tmp/tmp.8bd9zQWCwV + rm /tmp/tmp.bL1J1tGUF9 /tmp/tmp.8bd9zQWCwV + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find 
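The per-pod data check that keeps recurring in this trace (compare_mongo_cmd via simple_data_check) boils down to: exec into the psmdb-client pod, run db.test.find() through the mongo shell, strip connection noise and ObjectIds, and diff the result against the stored expectation. A minimal sketch follows, assuming placeholder values for $CLIENT_POD, $NS and $SRC (they are illustrative, not taken from this run):

  # Sketch of the recurring compare_mongo_cmd check against each rs0 member.
  # $CLIENT_POD, $NS and $SRC are assumed placeholders for illustration.
  for pod in some-name-rs0-0 some-name-rs0-1 some-name-rs0-2; do
      kubectl exec "$CLIENT_POD" -- bash -c \
          "printf 'use myApp\n db.test.find()\n' | mongo \"mongodb://myApp:myPass@${pod}.some-name-rs0.${NS}.svc.cluster.local/admin?ssl=false&replicaSet=rs0\"" \
          | egrep -v 'I NETWORK|W NETWORK|F NETWORK|Percona Server for MongoDB|connecting to:|Implicit session:|versions do not match|Error saving history file' \
          | sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' \
          > /tmp/find
      diff -u "$SRC/e2e-tests/demand-backup/compare/find.json" /tmp/find
  done

In this run every diff comes back empty, which is why the trace moves straight on to the next pod after each comparison.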
+ for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:30:48+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.VCchXIqxYN +++ mktemp ++ local LAST_ERR=/tmp/tmp.N66IQ514WW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VCchXIqxYN ++ cat /tmp/tmp.N66IQ514WW ++ rm /tmp/tmp.VCchXIqxYN /tmp/tmp.N66IQ514WW ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.TE3e3wgWxA ++ mktemp + local LAST_ERR=/tmp/tmp.dNX2Ee3T1z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TE3e3wgWxA + cat /tmp/tmp.dNX2Ee3T1z + rm /tmp/tmp.TE3e3wgWxA /tmp/tmp.dNX2Ee3T1z + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:30:50+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YCh9UAakte +++ mktemp ++ local LAST_ERR=/tmp/tmp.byXh2Omq2E ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YCh9UAakte ++ cat /tmp/tmp.byXh2Omq2E ++ rm /tmp/tmp.YCh9UAakte /tmp/tmp.byXh2Omq2E ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.EsZKwJcgOe ++ mktemp + local LAST_ERR=/tmp/tmp.V9bRIodzlO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EsZKwJcgOe + cat /tmp/tmp.V9bRIodzlO + rm /tmp/tmp.EsZKwJcgOe /tmp/tmp.V9bRIodzlO + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + desc 'check backup and restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- minio ----------------------------------------------------------------------------------- ++ get_backup_dest backup-minio ++ local backup_name=backup-minio ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' +++ mktemp ++ sed 's|s3://||' ++ sed 's|azure://||' ++ local LAST_OUT=/tmp/tmp.vYMIOUVbaa +++ mktemp ++ local LAST_ERR=/tmp/tmp.0mICx2Prok ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vYMIOUVbaa ++ cat /tmp/tmp.0mICx2Prok ++ rm /tmp/tmp.vYMIOUVbaa /tmp/tmp.0mICx2Prok ++ return 0 + backup_dest_minio=operator-testing/2025-05-12T12:28:08Z + grep myApp.test.gz + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key 
AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2025-05-12T12:28:08Z/rs0/ ++ mktemp + local LAST_OUT=/tmp/tmp.DaFM1vGQV6 ++ mktemp + local LAST_ERR=/tmp/tmp.sfZu8F3NVn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2025-05-12T12:28:08Z/rs0/ + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DaFM1vGQV6 + cat /tmp/tmp.sfZu8F3NVn If you don't see a command prompt, try pressing enter. warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-26315 + rm /tmp/tmp.DaFM1vGQV6 /tmp/tmp.sfZu8F3NVn + return 0 2025-05-12 12:28:12 55 myApp.test.gz + run_recovery_check backup-minio some-name-rs0 + local backup=backup-minio + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NEoir6ujJ0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.HuIcaLUrdX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NEoir6ujJ0 ++ cat /tmp/tmp.HuIcaLUrdX ++ rm /tmp/tmp.NEoir6ujJ0 /tmp/tmp.HuIcaLUrdX ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.PwGcf7GaBs ++ mktemp + local LAST_ERR=/tmp/tmp.m28iweNUkI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PwGcf7GaBs Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("42fbcd44-c68d-46b1-a2f1-02d97d2ec61d") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.m28iweNUkI + rm 
/tmp/tmp.PwGcf7GaBs /tmp/tmp.m28iweNUkI + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-26315 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:31:01+0000] running db.test.find() in myApp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-26315 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X8cIq2d2ef +++ mktemp ++ local LAST_ERR=/tmp/tmp.5doIv3umyU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.X8cIq2d2ef ++ cat /tmp/tmp.5doIv3umyU ++ rm /tmp/tmp.X8cIq2d2ef /tmp/tmp.5doIv3umyU ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.6hSvGQdivY ++ mktemp + local LAST_ERR=/tmp/tmp.kK2WlX0SnL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6hSvGQdivY + cat /tmp/tmp.kK2WlX0SnL + rm /tmp/tmp.6hSvGQdivY /tmp/tmp.kK2WlX0SnL + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.78sjnJyyHF/find-2nd + run_restore backup-minio + local backup_name=backup-minio + /usr/bin/sed -e 's/name:/name: restore-backup-minio/' + /usr/bin/sed -e 's/backupName:/backupName: backup-minio/' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/restore.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.CdXPIJvOPT ++ mktemp + local LAST_ERR=/tmp/tmp.w0aAO3nsev + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CdXPIJvOPT perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio created + cat /tmp/tmp.w0aAO3nsev + rm /tmp/tmp.CdXPIJvOPT /tmp/tmp.w0aAO3nsev + return 0 + wait_restore backup-minio some-name + local backup_name=backup-minio + local 
cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio object to be createdOK Waiting psmdb-restore/restore-backup-minio to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mTFyjVOtei +++ mktemp ++ local LAST_ERR=/tmp/tmp.SdUKV2VhZ2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mTFyjVOtei ++ cat /tmp/tmp.SdUKV2VhZ2 ++ rm /tmp/tmp.mTFyjVOtei /tmp/tmp.SdUKV2VhZ2 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:31:24+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FXgl5zeQnY +++ mktemp ++ local LAST_ERR=/tmp/tmp.MG74J3NXaR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FXgl5zeQnY ++ cat /tmp/tmp.MG74J3NXaR ++ rm /tmp/tmp.FXgl5zeQnY /tmp/tmp.MG74J3NXaR ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.RuLpP4e2R0 ++ mktemp + local LAST_ERR=/tmp/tmp.lZXkXAHCYu + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RuLpP4e2R0 + cat /tmp/tmp.lZXkXAHCYu + rm /tmp/tmp.RuLpP4e2R0 /tmp/tmp.lZXkXAHCYu + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:31:27+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GPDwPcLGCm +++ mktemp ++ local LAST_ERR=/tmp/tmp.lP7ooTFp0b ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.GPDwPcLGCm ++ cat /tmp/tmp.lP7ooTFp0b ++ rm /tmp/tmp.GPDwPcLGCm /tmp/tmp.lP7ooTFp0b ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.bzX3tzHYkQ ++ mktemp + local LAST_ERR=/tmp/tmp.zOLlMNrPeV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bzX3tzHYkQ + cat /tmp/tmp.zOLlMNrPeV + rm /tmp/tmp.bzX3tzHYkQ /tmp/tmp.zOLlMNrPeV + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:31:29+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y9P1PI0zYd +++ mktemp ++ local LAST_ERR=/tmp/tmp.Fy1RNVmML7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.y9P1PI0zYd ++ cat /tmp/tmp.Fy1RNVmML7 ++ rm /tmp/tmp.y9P1PI0zYd /tmp/tmp.Fy1RNVmML7 ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.sUx6FMcJlG ++ mktemp + local LAST_ERR=/tmp/tmp.WFVxYhqD8B + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sUx6FMcJlG + cat /tmp/tmp.WFVxYhqD8B + rm /tmp/tmp.sUx6FMcJlG /tmp/tmp.WFVxYhqD8B + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + run_mongo 'use myApp\n db.dropUser("test1user")' userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.dropUser("test1user")' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jPY9x0kN2x +++ mktemp ++ local LAST_ERR=/tmp/tmp.FDC9JT3jU3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.jPY9x0kN2x ++ cat /tmp/tmp.FDC9JT3jU3 ++ rm /tmp/tmp.jPY9x0kN2x /tmp/tmp.FDC9JT3jU3 ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.dropUser("test1user")\n'\'' | mongo 
mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.V0CaIuLrZi ++ mktemp + local LAST_ERR=/tmp/tmp.S7ZDySgMqm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.dropUser("test1user")\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.V0CaIuLrZi Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("d9401824-6836-473a-892c-fa2644c0c5d7") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.S7ZDySgMqm + rm /tmp/tmp.V0CaIuLrZi /tmp/tmp.S7ZDySgMqm + return 0 + run_mongo 'use myApp\n db.dropRole("test1role")' userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.dropRole("test1role")' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JcUN9qtet8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.IkfCkbplfV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JcUN9qtet8 ++ cat /tmp/tmp.IkfCkbplfV ++ rm /tmp/tmp.JcUN9qtet8 /tmp/tmp.IkfCkbplfV ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.dropRole("test1role")\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.CmcMLbju55 ++ mktemp + local LAST_ERR=/tmp/tmp.9JHSeoDpVR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.dropRole("test1role")\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CmcMLbju55 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("a94a81aa-cb29-4f74-b0be-18126e8d6aa4") } Percona Server 
for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.9JHSeoDpVR + rm /tmp/tmp.CmcMLbju55 /tmp/tmp.9JHSeoDpVR + return 0 + desc 'selective restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- selective restore -- minio ----------------------------------------------------------------------------------- + run_recovery_check_selective backup-minio some-name-rs0 myApp test2 false test1user test1role + local backup=backup-minio + local cluster=some-name-rs0 + local database=myApp + local collection=test2 + local with_users_and_roles=false + local username=test1user + local role=test1role + restore_name=restore-backup-minio-selective + run_mongo 'use myApp\n db.test2.drop()' myApp:myPass@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.test2.drop()' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W7jMDfAxZb +++ mktemp ++ local LAST_ERR=/tmp/tmp.N6ry0LuYKH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.W7jMDfAxZb ++ cat /tmp/tmp.N6ry0LuYKH ++ rm /tmp/tmp.W7jMDfAxZb /tmp/tmp.N6ry0LuYKH ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.WXXQkVYYHP ++ mktemp + local LAST_ERR=/tmp/tmp.RwBNxGHc37 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.WXXQkVYYHP Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("cef3c390-3cf7-42d7-bf54-7a77941d291c") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.RwBNxGHc37 + rm /tmp/tmp.WXXQkVYYHP /tmp/tmp.RwBNxGHc37 + return 0 ++ collection_exists test2 ++ local collection=test2 ++ grep -v 'switched to' ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-26315 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-26315 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ jq 'index("test2") != null' 
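For context before the selective restore is applied: the collection_exists helper being expanded in this part of the trace is a three-stage pipeline. It asks the replica set for the collection names in myApp, drops the "switched to db myApp" line the shell prints, and lets jq report whether the requested name is present, so the caller receives a literal true or false string. A rough sketch, assuming the same run_mongo helper and credentials used throughout this log:

    # sketch of collection_exists as traced here (run_mongo is the suite's helper)
    collection_exists() {
        local collection="$1"
        run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' \
            myApp:myPass@some-name-rs0.demand-backup-26315 '' '' --quiet \
            | grep -v 'switched to' \
            | jq "index(\"${collection}\") != null"    # prints "true" or "false"
    }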
+++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.yfMYOBzwiB ++++ mktemp +++ local LAST_ERR=/tmp/tmp.NP6t1mRmW3 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.yfMYOBzwiB +++ cat /tmp/tmp.NP6t1mRmW3 +++ rm /tmp/tmp.yfMYOBzwiB /tmp/tmp.NP6t1mRmW3 +++ return 0 ++ local client_container=psmdb-client-66f577db5f-6ffbt ++ local mongo_flag=--quiet ++ [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AZ4emjSyQN +++ mktemp ++ local LAST_ERR=/tmp/tmp.d9cnDw176f ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AZ4emjSyQN ++ cat /tmp/tmp.d9cnDw176f ++ rm /tmp/tmp.AZ4emjSyQN /tmp/tmp.d9cnDw176f ++ return 0 + [[ false == \t\r\u\e ]] + yq '.spec.backupName="backup-minio"' + kubectl_bin apply -f - + yq '.spec.selective.namespaces[0]="myApp.test"' + yq .spec.selective.withUsersAndRoles=false + yq '.metadata.name="restore-backup-minio-selective"' + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/restore.yml ++ mktemp + local LAST_OUT=/tmp/tmp.7mpUlxa2BY ++ mktemp + local LAST_ERR=/tmp/tmp.YKtSHWY5u6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7mpUlxa2BY perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-selective created + cat /tmp/tmp.YKtSHWY5u6 + rm /tmp/tmp.7mpUlxa2BY /tmp/tmp.YKtSHWY5u6 + return 0 + wait_restore backup-minio-selective some-name + local backup_name=backup-minio-selective + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-selective object to be createdOK Waiting psmdb-restore/restore-backup-minio-selective to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t6C8j2MOOH +++ mktemp ++ local LAST_ERR=/tmp/tmp.bN9lTF6Z8u ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.t6C8j2MOOH ++ cat /tmp/tmp.bN9lTF6Z8u ++ rm /tmp/tmp.t6C8j2MOOH /tmp/tmp.bN9lTF6Z8u ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo ++ collection_exists test2 ++ 
local collection=test2 ++ grep -v 'switched to' ++ jq 'index("test2") != null' ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-26315 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-26315 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.7N2gsaNFhk ++++ mktemp +++ local LAST_ERR=/tmp/tmp.AZLduaDd6R +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.7N2gsaNFhk +++ cat /tmp/tmp.AZLduaDd6R +++ rm /tmp/tmp.7N2gsaNFhk /tmp/tmp.AZLduaDd6R +++ return 0 ++ local client_container=psmdb-client-66f577db5f-6ffbt ++ local mongo_flag=--quiet ++ [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QwiEGexkRz +++ mktemp ++ local LAST_ERR=/tmp/tmp.Juv2794xZu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.QwiEGexkRz ++ cat /tmp/tmp.Juv2794xZu ++ rm /tmp/tmp.QwiEGexkRz /tmp/tmp.Juv2794xZu ++ return 0 + [[ false == \t\r\u\e ]] + [[ false == \t\r\u\e ]] + [[ false == \f\a\l\s\e ]] ++ user_exists test1user ++ local username=test1user ++ jq 'any(.[]; ._id==myApp.test1user)' ++ grep -v 'switched to' ++ run_mongo 'use myApp\n JSON.stringify(db.getUsers())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getUsers())' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local jq: error: myApp/0 is not defined at , line 1: any(.[]; ._id==myApp.test1user) jq: 1 compile error +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.vyTfWhlrdZ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.MF9qnxhPYU +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.vyTfWhlrdZ +++ cat /tmp/tmp.MF9qnxhPYU +++ rm /tmp/tmp.vyTfWhlrdZ /tmp/tmp.MF9qnxhPYU +++ return 0 ++ local client_container=psmdb-client-66f577db5f-6ffbt ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo 
mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jROxkSpS22 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hhy90l9Imm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.jROxkSpS22 ++ cat /tmp/tmp.Hhy90l9Imm ++ rm /tmp/tmp.jROxkSpS22 /tmp/tmp.Hhy90l9Imm ++ return 0 + [[ '' == \t\r\u\e ]] ++ role_exists test1role ++ local role=test1role ++ grep -v 'switched to' ++ run_mongo 'use myApp\n JSON.stringify(db.getRoles())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getRoles())' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ jq 'any(.[]; ._id==myApp.test1role)' jq: error: myApp/0 is not defined at , line 1: any(.[]; ._id==myApp.test1role) jq: 1 compile error ++++ mktemp +++ local LAST_OUT=/tmp/tmp.DaTdJhC7jC ++++ mktemp +++ local LAST_ERR=/tmp/tmp.UmsdK1xbCi +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.DaTdJhC7jC +++ cat /tmp/tmp.UmsdK1xbCi +++ rm /tmp/tmp.DaTdJhC7jC /tmp/tmp.UmsdK1xbCi +++ return 0 ++ local client_container=psmdb-client-66f577db5f-6ffbt ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YNe8lbQDn8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.fEgA9skykJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YNe8lbQDn8 ++ cat /tmp/tmp.fEgA9skykJ ++ rm /tmp/tmp.YNe8lbQDn8 /tmp/tmp.fEgA9skykJ ++ return 0 + [[ '' == \t\r\u\e ]] + kubectl_bin delete psmdb-restore restore-backup-minio-selective ++ mktemp + local LAST_OUT=/tmp/tmp.QwC96rTJGj ++ mktemp + local LAST_ERR=/tmp/tmp.ssrCFEranq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb-restore restore-backup-minio-selective + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.QwC96rTJGj perconaservermongodbrestore.psmdb.percona.com "restore-backup-minio-selective" deleted + cat /tmp/tmp.ssrCFEranq + rm /tmp/tmp.QwC96rTJGj /tmp/tmp.ssrCFEranq + return 0 + run_mongo 'use myApp\n 
db.test2.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.test2.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5FcYaptvvS +++ mktemp ++ local LAST_ERR=/tmp/tmp.SZbeNzwj5Y ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5FcYaptvvS ++ cat /tmp/tmp.SZbeNzwj5Y ++ rm /tmp/tmp.5FcYaptvvS /tmp/tmp.SZbeNzwj5Y ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.HRPzPDx55f ++ mktemp + local LAST_ERR=/tmp/tmp.TF1LMBW4mp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.HRPzPDx55f Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("2b538e45-27fa-47d4-bdeb-ec95bfe1d9ce") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.TF1LMBW4mp + rm /tmp/tmp.HRPzPDx55f /tmp/tmp.TF1LMBW4mp + return 0 + desc 'selective restore with users and roles -- minio' + set +o xtrace ----------------------------------------------------------------------------------- selective restore with users and roles -- minio ----------------------------------------------------------------------------------- + run_recovery_check_selective backup-minio some-name-rs0 myApp test2 true test1user test1role + local backup=backup-minio + local cluster=some-name-rs0 + local database=myApp + local collection=test2 + local with_users_and_roles=true + local username=test1user + local role=test1role + restore_name=restore-backup-minio-selective + run_mongo 'use myApp\n db.test2.drop()' myApp:myPass@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.test2.drop()' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ob9qbE4mwi +++ mktemp ++ local LAST_ERR=/tmp/tmp.sV6AeOoWfI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ 
set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ob9qbE4mwi ++ cat /tmp/tmp.sV6AeOoWfI ++ rm /tmp/tmp.ob9qbE4mwi /tmp/tmp.sV6AeOoWfI ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.PpaxUFOBVb ++ mktemp + local LAST_ERR=/tmp/tmp.gcnoZVw9xQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PpaxUFOBVb Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("795e4a9b-1c2d-4baf-8c63-01dd668b614d") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.gcnoZVw9xQ + rm /tmp/tmp.PpaxUFOBVb /tmp/tmp.gcnoZVw9xQ + return 0 ++ collection_exists test2 ++ local collection=test2 ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-26315 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-26315 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ jq 'index("test2") != null' ++++ mktemp ++ grep -v 'switched to' +++ local LAST_OUT=/tmp/tmp.J15OwrjThD ++++ mktemp +++ local LAST_ERR=/tmp/tmp.R0noIOLAU1 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.J15OwrjThD +++ cat /tmp/tmp.R0noIOLAU1 +++ rm /tmp/tmp.J15OwrjThD /tmp/tmp.R0noIOLAU1 +++ return 0 ++ local client_container=psmdb-client-66f577db5f-6ffbt ++ local mongo_flag=--quiet ++ [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ur54H3qChf +++ mktemp ++ local LAST_ERR=/tmp/tmp.EgT8GOWxrE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | 
mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Ur54H3qChf ++ cat /tmp/tmp.EgT8GOWxrE ++ rm /tmp/tmp.Ur54H3qChf /tmp/tmp.EgT8GOWxrE ++ return 0 + [[ false == \t\r\u\e ]] + yq .spec.selective.withUsersAndRoles=true + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/restore.yml + kubectl_bin apply -f - + yq '.metadata.name="restore-backup-minio-selective"' ++ mktemp + yq '.spec.selective.namespaces[0]="myApp.test"' + yq '.spec.backupName="backup-minio"' + local LAST_OUT=/tmp/tmp.XPz2iH1vTZ ++ mktemp + local LAST_ERR=/tmp/tmp.eri9ZFG1lX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XPz2iH1vTZ perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-selective created + cat /tmp/tmp.eri9ZFG1lX + rm /tmp/tmp.XPz2iH1vTZ /tmp/tmp.eri9ZFG1lX + return 0 + wait_restore backup-minio-selective some-name + local backup_name=backup-minio-selective + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-selective object to be createdOK Waiting psmdb-restore/restore-backup-minio-selective to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IetMFpKFZH +++ mktemp ++ local LAST_ERR=/tmp/tmp.Nm13PaOIyU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IetMFpKFZH ++ cat /tmp/tmp.Nm13PaOIyU ++ rm /tmp/tmp.IetMFpKFZH /tmp/tmp.Nm13PaOIyU ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo ++ collection_exists test2 ++ local collection=test2 ++ grep -v 'switched to' ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-26315 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-26315 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ jq 'index("test2") != null' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.trnjIIaZ7X ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Nh0XslNlvj +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.trnjIIaZ7X +++ cat /tmp/tmp.Nh0XslNlvj +++ rm /tmp/tmp.trnjIIaZ7X /tmp/tmp.Nh0XslNlvj +++ return 0 ++ local client_container=psmdb-client-66f577db5f-6ffbt ++ local mongo_flag=--quiet ++ [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo 
mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.i602OwmuEp +++ mktemp ++ local LAST_ERR=/tmp/tmp.6n4G896A5e ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.i602OwmuEp ++ cat /tmp/tmp.6n4G896A5e ++ rm /tmp/tmp.i602OwmuEp /tmp/tmp.6n4G896A5e ++ return 0 + [[ false == \t\r\u\e ]] + [[ true == \t\r\u\e ]] ++ user_exists test1user ++ local username=test1user ++ run_mongo 'use myApp\n JSON.stringify(db.getUsers())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getUsers())' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ jq 'any(.[]; ._id==myApp.test1user)' ++ grep -v 'switched to' jq: error: myApp/0 is not defined at , line 1: any(.[]; ._id==myApp.test1user) jq: 1 compile error +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.v4ixwJ2Igl ++++ mktemp +++ local LAST_ERR=/tmp/tmp.CuNAX7cXVb +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.v4ixwJ2Igl +++ cat /tmp/tmp.CuNAX7cXVb +++ rm /tmp/tmp.v4ixwJ2Igl /tmp/tmp.CuNAX7cXVb +++ return 0 ++ local client_container=psmdb-client-66f577db5f-6ffbt ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RDw9X2H5J2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.dThorYORcz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RDw9X2H5J2 ++ cat /tmp/tmp.dThorYORcz ++ rm /tmp/tmp.RDw9X2H5J2 /tmp/tmp.dThorYORcz ++ return 0 + [[ '' == \f\a\l\s\e ]] ++ role_exists test1role ++ local role=test1role ++ run_mongo 'use myApp\n JSON.stringify(db.getRoles())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getRoles())' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ jq 'any(.[]; ._id==myApp.test1role)' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ grep -v 'switched to' jq: error: myApp/0 is not defined 
at , line 1: any(.[]; ._id==myApp.test1role) jq: 1 compile error ++++ mktemp +++ local LAST_OUT=/tmp/tmp.EH7VXHpVgK ++++ mktemp +++ local LAST_ERR=/tmp/tmp.TxIoB0tSQL +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.EH7VXHpVgK +++ cat /tmp/tmp.TxIoB0tSQL +++ rm /tmp/tmp.EH7VXHpVgK /tmp/tmp.TxIoB0tSQL +++ return 0 ++ local client_container=psmdb-client-66f577db5f-6ffbt ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KIDK4lVSab +++ mktemp ++ local LAST_ERR=/tmp/tmp.nxYltb1sZZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.KIDK4lVSab ++ cat /tmp/tmp.nxYltb1sZZ ++ rm /tmp/tmp.KIDK4lVSab /tmp/tmp.nxYltb1sZZ ++ return 0 + [[ '' == \f\a\l\s\e ]] + desc 'restore from backup source, with storageName -- minio' + set +o xtrace ----------------------------------------------------------------------------------- restore from backup source, with storageName -- minio ----------------------------------------------------------------------------------- ++ get_backup_dest backup-minio ++ local backup_name=backup-minio ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ sed 's|s3://||' ++ sed 's|azure://||' +++ mktemp ++ sed -e 's/.json$//' ++ local LAST_OUT=/tmp/tmp.DJQvOHfOjo +++ mktemp ++ local LAST_ERR=/tmp/tmp.9LqS5udjEG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DJQvOHfOjo ++ cat /tmp/tmp.9LqS5udjEG ++ rm /tmp/tmp.DJQvOHfOjo /tmp/tmp.9LqS5udjEG ++ return 0 + backup_dest_minio=operator-testing/2025-05-12T12:28:08Z + run_recovery_check_bkp_source backup-minio operator-testing/2025-05-12T12:28:08Z some-name-rs0 backup-minio-source-0 + local backup=backup-minio + local backup_dest=operator-testing/2025-05-12T12:28:08Z + local cluster=some-name-rs0 + local source=backup-minio-source-0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uZEmscClGB +++ mktemp ++ local LAST_ERR=/tmp/tmp.4UyVW2F6p4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' 
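The jq compile errors above (and the matching ones after the earlier selective restore without users and roles) are a defect in the check itself, not in the restore: the filter any(.[]; ._id==myApp.test1user) passes myApp.test1user as an unquoted jq expression, so jq aborts with "myApp/0 is not defined", user_exists and role_exists emit nothing, and the subsequent [[ '' == ... ]] tests silently compare against an empty string. A corrected filter only has to quote the expected _id value, for example (a sketch of the fix, not the suite's actual code):

    # user_exists with the expected _id passed in as a quoted jq variable
    user_exists() {
        local username="$1"
        run_mongo 'use myApp\n JSON.stringify(db.getUsers())' \
            userAdmin:userAdmin123456@some-name-rs0.demand-backup-26315 '' '' --quiet \
            | grep -v 'switched to' \
            | jq --arg id "myApp.${username}" 'any(.[]; ._id == $id)'
    }

role_exists has the same problem and the same shape of fix, using db.getRoles() and the role name.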
++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.uZEmscClGB ++ cat /tmp/tmp.4UyVW2F6p4 ++ rm /tmp/tmp.uZEmscClGB /tmp/tmp.4UyVW2F6p4 ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.fQa0Q9Hw8M ++ mktemp + local LAST_ERR=/tmp/tmp.cgULtHTzOl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fQa0Q9Hw8M Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("9bc93af1-e3c8-4dc8-9e1d-b7aa64b041b7") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.cgULtHTzOl + rm /tmp/tmp.fQa0Q9Hw8M /tmp/tmp.cgULtHTzOl + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-26315 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:33:10+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-26315 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PLt0LImGuM +++ mktemp ++ local LAST_ERR=/tmp/tmp.lAuiNi5dBT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PLt0LImGuM ++ cat /tmp/tmp.lAuiNi5dBT ++ rm /tmp/tmp.PLt0LImGuM /tmp/tmp.lAuiNi5dBT ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.fpDwXKLSNU ++ mktemp + local LAST_ERR=/tmp/tmp.ZSCLe0Flub + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fpDwXKLSNU + cat /tmp/tmp.ZSCLe0Flub + rm /tmp/tmp.fpDwXKLSNU /tmp/tmp.ZSCLe0Flub + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.78sjnJyyHF/find-2nd + run_restore_backupsource backup-minio-source-0 operator-testing/2025-05-12T12:28:08Z + local backupName=backup-minio-source-0 + local backupDest=operator-testing/2025-05-12T12:28:08Z + local storageName= + desc 'run restore restore-backup-minio-source-0 from backup backup-minio-source-0 destination is operator-testing/2025-05-12T12:28:08Z' + set +o xtrace ----------------------------------------------------------------------------------- run restore restore-backup-minio-source-0 from backup backup-minio-source-0 destination is operator-testing/2025-05-12T12:28:08Z ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f - + /usr/bin/sed -e 's|BACKUP-NAME|operator-testing/2025-05-12T12:28:08Z|' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/restore-backupsource.yml + /usr/bin/sed -e 's/name:/name: restore-backup-minio-source-0/' ++ mktemp + local LAST_OUT=/tmp/tmp.OxpizjETbP + /usr/bin/sed -e /storageName/d ++ mktemp + local LAST_ERR=/tmp/tmp.PhayjZHMsF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' 
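For reference, the restore object applied in the step above is assembled by templating the shared backupsource manifest; condensed from the trace (file path shortened, and the destination value is the .status.destination read from the backup-minio object earlier), it is roughly:

# Build and apply the PerconaServerMongoDBRestore for a raw backup destination.
cat e2e-tests/demand-backup/conf/restore-backupsource.yml \
  | sed -e 's/name:/name: restore-backup-minio-source-0/' \
  | sed -e 's|BACKUP-NAME|operator-testing/2025-05-12T12:28:08Z|' \
  | sed -e '/storageName/d' \
  | kubectl apply -f -
# The harness then waits for psmdb-restore/restore-backup-minio-source-0 to reach
# "ready" and for the psmdb cluster state to return to "ready".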
+ break + cat /tmp/tmp.OxpizjETbP perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-source-0 created + cat /tmp/tmp.PhayjZHMsF + rm /tmp/tmp.OxpizjETbP /tmp/tmp.PhayjZHMsF + return 0 + return + wait_restore backup-minio-source-0 some-name + local backup_name=backup-minio-source-0 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-source-0 object to be createdOK Waiting psmdb-restore/restore-backup-minio-source-0 to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NZSDo9UtTD +++ mktemp ++ local LAST_ERR=/tmp/tmp.NPCWPatAZR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NZSDo9UtTD ++ cat /tmp/tmp.NPCWPatAZR ++ rm /tmp/tmp.NZSDo9UtTD /tmp/tmp.NPCWPatAZR ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:33:34+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aqGpwBZosd +++ mktemp ++ local LAST_ERR=/tmp/tmp.PmLkdUe8Td ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.aqGpwBZosd ++ cat /tmp/tmp.PmLkdUe8Td ++ rm /tmp/tmp.aqGpwBZosd /tmp/tmp.PmLkdUe8Td ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.gcqd8U8J8P ++ mktemp + local LAST_ERR=/tmp/tmp.kRcsiHM4bO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gcqd8U8J8P + cat /tmp/tmp.kRcsiHM4bO + rm /tmp/tmp.gcqd8U8J8P /tmp/tmp.kRcsiHM4bO + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:33:36+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HSAfn3izQ9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.hnrEa9cCTW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HSAfn3izQ9 ++ cat /tmp/tmp.hnrEa9cCTW ++ rm /tmp/tmp.HSAfn3izQ9 /tmp/tmp.hnrEa9cCTW ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.AjfPQ9oEWz ++ mktemp + local LAST_ERR=/tmp/tmp.V6HyFSsZhC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AjfPQ9oEWz + cat /tmp/tmp.V6HyFSsZhC + rm /tmp/tmp.AjfPQ9oEWz /tmp/tmp.V6HyFSsZhC + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:33:40+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local driver=mongodb + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YdXS7R5k6s +++ mktemp ++ local LAST_ERR=/tmp/tmp.f89ZzTnmUs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YdXS7R5k6s ++ cat /tmp/tmp.f89ZzTnmUs ++ rm /tmp/tmp.YdXS7R5k6s /tmp/tmp.f89ZzTnmUs ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.3mqKS9dv4J ++ mktemp + local LAST_ERR=/tmp/tmp.MLtQZfr9kg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3mqKS9dv4J + cat /tmp/tmp.MLtQZfr9kg + rm /tmp/tmp.3mqKS9dv4J /tmp/tmp.MLtQZfr9kg + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + desc 'restore from backup source, no storageName -- minio' + set +o xtrace ----------------------------------------------------------------------------------- restore from backup source, no storageName -- minio ----------------------------------------------------------------------------------- ++ get_backup_dest backup-minio ++ local backup_name=backup-minio ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ sed 's|s3://||' ++ sed -e 's/.json$//' +++ mktemp ++ sed 's|azure://||' ++ local LAST_OUT=/tmp/tmp.DCtWjsBjHi +++ mktemp ++ local LAST_ERR=/tmp/tmp.f0IBWlXgAO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DCtWjsBjHi ++ cat /tmp/tmp.f0IBWlXgAO ++ rm /tmp/tmp.DCtWjsBjHi /tmp/tmp.f0IBWlXgAO ++ return 0 + backup_dest_minio=operator-testing/2025-05-12T12:28:08Z + run_recovery_check_bkp_source backup-minio operator-testing/2025-05-12T12:28:08Z some-name-rs0 backup-minio-source-1 + local backup=backup-minio + local 
backup_dest=operator-testing/2025-05-12T12:28:08Z + local cluster=some-name-rs0 + local source=backup-minio-source-1 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-26315 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HCk0W3NzDi +++ mktemp ++ local LAST_ERR=/tmp/tmp.DrZK0V7P5t ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HCk0W3NzDi ++ cat /tmp/tmp.DrZK0V7P5t ++ rm /tmp/tmp.HCk0W3NzDi /tmp/tmp.DrZK0V7P5t ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.MM1MKzsV0o ++ mktemp + local LAST_ERR=/tmp/tmp.qUeDNnFYW2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MM1MKzsV0o Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("5657e55b-4426-4409-8d3e-c99a0be08f41") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.qUeDNnFYW2 + rm /tmp/tmp.MM1MKzsV0o /tmp/tmp.qUeDNnFYW2 + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-26315 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:33:46+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-26315 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7koQWKgDtN +++ mktemp ++ local LAST_ERR=/tmp/tmp.vFsM9M43bV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7koQWKgDtN ++ cat /tmp/tmp.vFsM9M43bV ++ rm /tmp/tmp.7koQWKgDtN /tmp/tmp.vFsM9M43bV ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.RhwlgN30vV ++ mktemp + local LAST_ERR=/tmp/tmp.VeQcsaDKE8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RhwlgN30vV + cat /tmp/tmp.VeQcsaDKE8 + rm /tmp/tmp.RhwlgN30vV /tmp/tmp.VeQcsaDKE8 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.78sjnJyyHF/find-2nd + run_restore_backupsource backup-minio-source-1 operator-testing/2025-05-12T12:28:08Z + local backupName=backup-minio-source-1 + local backupDest=operator-testing/2025-05-12T12:28:08Z + local storageName= + desc 'run restore restore-backup-minio-source-1 from backup backup-minio-source-1 destination is operator-testing/2025-05-12T12:28:08Z' + set +o xtrace ----------------------------------------------------------------------------------- run restore restore-backup-minio-source-1 from backup backup-minio-source-1 destination is operator-testing/2025-05-12T12:28:08Z ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/restore-backupsource.yml ++ mktemp + local LAST_OUT=/tmp/tmp.X2Ed8m6E9A ++ mktemp + local LAST_ERR=/tmp/tmp.dryNmkZT9W + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's|BACKUP-NAME|operator-testing/2025-05-12T12:28:08Z|' + /usr/bin/sed -e 's/name:/name: restore-backup-minio-source-1/' + /usr/bin/sed -e /storageName/d + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' 
+ break + cat /tmp/tmp.X2Ed8m6E9A perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-source-1 created + cat /tmp/tmp.dryNmkZT9W + rm /tmp/tmp.X2Ed8m6E9A /tmp/tmp.dryNmkZT9W + return 0 + return + wait_restore backup-minio-source-1 some-name + local backup_name=backup-minio-source-1 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-source-1 object to be createdOK Waiting psmdb-restore/restore-backup-minio-source-1 to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pqjRhjFqYN +++ mktemp ++ local LAST_ERR=/tmp/tmp.qUnGAW1FuK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pqjRhjFqYN ++ cat /tmp/tmp.qUnGAW1FuK ++ rm /tmp/tmp.pqjRhjFqYN /tmp/tmp.qUnGAW1FuK ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:34:09+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.37tVUD1fKm +++ mktemp ++ local LAST_ERR=/tmp/tmp.bFAuzPqn0Q ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.37tVUD1fKm ++ cat /tmp/tmp.bFAuzPqn0Q ++ rm /tmp/tmp.37tVUD1fKm /tmp/tmp.bFAuzPqn0Q ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.dajvrBU1T8 ++ mktemp + local LAST_ERR=/tmp/tmp.5jdEYOr5aB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dajvrBU1T8 + cat /tmp/tmp.5jdEYOr5aB + rm /tmp/tmp.dajvrBU1T8 /tmp/tmp.5jdEYOr5aB + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:34:11+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hoC5XOfaiH +++ mktemp ++ local LAST_ERR=/tmp/tmp.kurAclh7hw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hoC5XOfaiH ++ cat /tmp/tmp.kurAclh7hw ++ rm /tmp/tmp.hoC5XOfaiH /tmp/tmp.kurAclh7hw ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.9P96Kyo4uP ++ mktemp + local LAST_ERR=/tmp/tmp.qi9yyxpI7y + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9P96Kyo4uP + cat /tmp/tmp.qi9yyxpI7y + rm /tmp/tmp.9P96Kyo4uP /tmp/tmp.qi9yyxpI7y + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-12T12:34:13+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 mongodb '' + local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ufipHDCIED +++ mktemp ++ local LAST_ERR=/tmp/tmp.5XH8oj3KlI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ufipHDCIED ++ cat /tmp/tmp.5XH8oj3KlI ++ rm /tmp/tmp.ufipHDCIED /tmp/tmp.5XH8oj3KlI ++ return 0 + local client_container=psmdb-client-66f577db5f-6ffbt + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.55UfFvkVEt ++ mktemp + local LAST_ERR=/tmp/tmp.KUQ34tIqI5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-6ffbt -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-26315.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.55UfFvkVEt + cat /tmp/tmp.KUQ34tIqI5 + rm /tmp/tmp.55UfFvkVEt /tmp/tmp.KUQ34tIqI5 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/compare/find.json /tmp/tmp.78sjnJyyHF/find + desc 'delete backup and check if it is removed from bucket -- minio' + set +o xtrace ----------------------------------------------------------------------------------- delete backup and check if it is removed from bucket -- minio ----------------------------------------------------------------------------------- + kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.p8J5917lNP ++ mktemp + local LAST_ERR=/tmp/tmp.M98lOiY3Tc + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.p8J5917lNP perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs" deleted perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted + cat /tmp/tmp.M98lOiY3Tc + rm /tmp/tmp.p8J5917lNP /tmp/tmp.M98lOiY3Tc + return 0 ++ kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key 
AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ cat +++ mktemp ++ grep -c operator-testing/2025-05-12T12:28:08Z ++ local LAST_OUT=/tmp/tmp.oGD4J3uR6n +++ mktemp ++ local LAST_ERR=/tmp/tmp.6CZx6PxIKl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oGD4J3uR6n ++ cat /tmp/tmp.6CZx6PxIKl ++ rm /tmp/tmp.oGD4J3uR6n /tmp/tmp.6CZx6PxIKl ++ return 0 + backup_exists=0 + [[ 0 -eq 1 ]] + '[' -z '' ']' + check_backup_deletion https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-12T12:27:57Z aws-s3 + path=https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-12T12:27:57Z + storage_name=aws-s3 + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-12T12:27:57Z + [[ 403 -eq 403 ]] + check_backup_deletion https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-12T12:28:31Z gcp-cs + path=https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-12T12:28:31Z + storage_name=gcp-cs + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-12T12:28:31Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-12T12:28:31Z + [[ 404 -eq 404 ]] + check_backup_deletion https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-12T12:28:20Z azure-blob + path=https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-12T12:28:20Z + storage_name=azure-blob + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-12T12:28:20Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-12T12:28:20Z + [[ 404 -eq 404 ]] + desc 'checking backup deletion without cr' + set +o xtrace ----------------------------------------------------------------------------------- checking backup deletion without cr ----------------------------------------------------------------------------------- + run_backup minio + local storage=minio + local backup_name=backup-minio + local type=logical + desc 'run backup backup-minio' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-minio" | .spec.storageName = "minio" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/backup-minio.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.sk1RN5bpDd ++ mktemp + local LAST_ERR=/tmp/tmp.x5lVqNOwqD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sk1RN5bpDd 
perconaservermongodbbackup.psmdb.percona.com/backup-minio created + cat /tmp/tmp.x5lVqNOwqD + rm /tmp/tmp.sk1RN5bpDd /tmp/tmp.x5lVqNOwqD + return 0 + '[' -z '' ']' + run_backup aws-s3 + local storage=aws-s3 + local backup_name=backup-aws-s3 + local type=logical + desc 'run backup backup-aws-s3' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-aws-s3 ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-aws-s3" | .spec.storageName = "aws-s3" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/backup-aws-s3.yml ++ mktemp + local LAST_OUT=/tmp/tmp.UQ6yJo3DnV ++ mktemp + local LAST_ERR=/tmp/tmp.To4AUb7ieT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UQ6yJo3DnV perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created + cat /tmp/tmp.To4AUb7ieT + rm /tmp/tmp.UQ6yJo3DnV /tmp/tmp.To4AUb7ieT + return 0 + run_backup gcp-cs + local storage=gcp-cs + local backup_name=backup-gcp-cs + local type=logical + desc 'run backup backup-gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-gcp-cs ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-gcp-cs" | .spec.storageName = "gcp-cs" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/backup-gcp-cs.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.LJsf5wv0CF ++ mktemp + local LAST_ERR=/tmp/tmp.ykA1FF1PnS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LJsf5wv0CF perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs created + cat /tmp/tmp.ykA1FF1PnS + rm /tmp/tmp.LJsf5wv0CF /tmp/tmp.ykA1FF1PnS + return 0 + run_backup azure-blob + local storage=azure-blob + local backup_name=backup-azure-blob + local type=logical + desc 'run backup backup-azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-azure-blob ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-azure-blob" | .spec.storageName = "azure-blob" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/e2e-tests/demand-backup/conf/backup-azure-blob.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.3gTue5mM5W ++ mktemp + local LAST_ERR=/tmp/tmp.v70UZo26aS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3gTue5mM5W perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created + cat /tmp/tmp.v70UZo26aS + rm /tmp/tmp.3gTue5mM5W /tmp/tmp.v70UZo26aS + return 0 + wait_backup backup-minio + local backup_name=backup-minio + local target_state=ready + set +o xtrace waiting for backup-minio to reach ready state. + '[' -z '' ']' + wait_backup backup-aws-s3 + local backup_name=backup-aws-s3 + local target_state=ready + set +o xtrace waiting for backup-aws-s3 to reach ready state...... 
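For reference, the "checking backup deletion without cr" phase traced around this point condenses to roughly the following sketch (reconstructed from the trace; the harness's run_backup/wait_backup helpers add retries and temp-file capture, and polling .status.state is an assumption inferred from the wait messages):

# Re-create a logical backup for each configured storage by patching the
# per-storage manifest and applying it.
for storage in minio aws-s3 gcp-cs azure-blob; do
  yq eval "
    .metadata.name = \"backup-$storage\" |
    .spec.storageName = \"$storage\" |
    .spec.type = \"logical\"
  " "e2e-tests/demand-backup/conf/backup-$storage.yml" | kubectl apply -f -
done
# Poll each psmdb-backup object until it reports the ready state.
for storage in minio aws-s3 gcp-cs azure-blob; do
  until [[ $(kubectl get psmdb-backup "backup-$storage" -o jsonpath='{.status.state}') == "ready" ]]; do
    echo -n .
    sleep 1
  done
  echo
done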
+ wait_backup backup-gcp-cs + local backup_name=backup-gcp-cs + local target_state=ready + set +o xtrace waiting for backup-gcp-cs to reach ready state.......... + wait_backup backup-azure-blob + local backup_name=backup-azure-blob + local target_state=ready + set +o xtrace waiting for backup-azure-blob to reach ready state. + kubectl_bin delete psmdb --all ++ mktemp + local LAST_OUT=/tmp/tmp.o3nSbkHw8B ++ mktemp + local LAST_ERR=/tmp/tmp.Mh5Vnl5fuw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.o3nSbkHw8B perconaservermongodb.psmdb.percona.com "some-name" deleted + cat /tmp/tmp.Mh5Vnl5fuw + rm /tmp/tmp.o3nSbkHw8B /tmp/tmp.Mh5Vnl5fuw + return 0 + sleep 60 + desc 'delete backup and check if it is removed from bucket -- minio' + set +o xtrace ----------------------------------------------------------------------------------- delete backup and check if it is removed from bucket -- minio ----------------------------------------------------------------------------------- + kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.PlilwFahsh ++ mktemp + local LAST_ERR=/tmp/tmp.t0ank9BSC9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PlilwFahsh perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs" deleted perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted + cat /tmp/tmp.t0ank9BSC9 + rm /tmp/tmp.PlilwFahsh /tmp/tmp.t0ank9BSC9 + return 0 ++ kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ grep -c operator-testing/2025-05-12T12:28:08Z ++ cat +++ mktemp ++ local LAST_OUT=/tmp/tmp.hqkqvAUVJZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.ikNz1TRthg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hqkqvAUVJZ ++ cat /tmp/tmp.ikNz1TRthg If you don't see a command prompt, try pressing enter. 
++ rm /tmp/tmp.hqkqvAUVJZ /tmp/tmp.ikNz1TRthg ++ return 0 + backup_exists=0 + [[ 0 -eq 1 ]] + '[' -z '' ']' + check_backup_deletion https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-12T12:27:57Z aws-s3 + path=https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-12T12:27:57Z + storage_name=aws-s3 + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-12T12:27:57Z + [[ 403 -eq 403 ]] + check_backup_deletion https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-12T12:28:31Z gcp-cs + path=https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-12T12:28:31Z + storage_name=gcp-cs + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-12T12:28:31Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-12T12:28:31Z + [[ 404 -eq 404 ]] + check_backup_deletion https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-12T12:28:20Z azure-blob + path=https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-12T12:28:20Z + storage_name=azure-blob + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-12T12:28:20Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-12T12:28:20Z + [[ 404 -eq 404 ]] + desc 'check for passwords leak' + set +o xtrace ----------------------------------------------------------------------------------- check for passwords leak ----------------------------------------------------------------------------------- + check_passwords_leak + local secrets + local passwords + local pods ++ kubectl_bin get secrets -o json ++ jq -r '.items[].data | to_entries | .[] | select(.key | (contains("_PASSWORD"))) | .value' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0umBu5K2yb +++ mktemp ++ local LAST_ERR=/tmp/tmp.WdpLZ6z7kP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0umBu5K2yb ++ cat /tmp/tmp.WdpLZ6z7kP ++ rm /tmp/tmp.0umBu5K2yb /tmp/tmp.WdpLZ6z7kP ++ return 0 + secrets='YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2' + echo secrets=YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 secrets=YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== 
Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo + passwords='backup123456# backup123456%23 clusterAdmin123456 clusterAdmin123456 clusterMonitor123456 clusterMonitor123456 databaseAdmin123456 databaseAdmin123456 userAdmin123456 userAdmin123456 backup123456# clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2' + echo passwords=backup123456# backup123456%23 clusterAdmin123456 clusterAdmin123456 clusterMonitor123456 clusterMonitor123456 databaseAdmin123456 databaseAdmin123456 userAdmin123456 userAdmin123456 backup123456# clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 passwords=backup123456# backup123456%23 clusterAdmin123456 clusterAdmin123456 clusterMonitor123456 clusterMonitor123456 databaseAdmin123456 databaseAdmin123456 userAdmin123456 userAdmin123456 backup123456# clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 ++ kubectl_bin get pods -o name +++ mktemp ++ local LAST_OUT=/tmp/tmp.JEpNdjZ188 +++ mktemp ++ local LAST_ERR=/tmp/tmp.2YOwqcwbCS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods -o name ++ awk -F / '{print $2}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JEpNdjZ188 ++ cat /tmp/tmp.2YOwqcwbCS ++ rm /tmp/tmp.JEpNdjZ188 /tmp/tmp.2YOwqcwbCS ++ return 0 + pods='minio-service-86dfccd949-klrtf psmdb-client-66f577db5f-6ffbt' + echo pods=minio-service-86dfccd949-klrtf psmdb-client-66f577db5f-6ffbt pods=minio-service-86dfccd949-klrtf psmdb-client-66f577db5f-6ffbt + collect_logs demand-backup-26315 + local containers + local count + NS=demand-backup-26315 + for p in '$pods' ++ kubectl_bin -n demand-backup-26315 
get pod minio-service-86dfccd949-klrtf -o 'jsonpath={.spec.containers[*].name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.A3m29UrNtZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.XBu2KogGAr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl -n demand-backup-26315 get pod minio-service-86dfccd949-klrtf -o 'jsonpath={.spec.containers[*].name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.A3m29UrNtZ ++ cat /tmp/tmp.XBu2KogGAr ++ rm /tmp/tmp.A3m29UrNtZ /tmp/tmp.XBu2KogGAr ++ return 0 + containers=minio + for c in '$containers' + [[ minio =~ pmm ]] + kubectl_bin -n demand-backup-26315 logs minio-service-86dfccd949-klrtf -c minio ++ mktemp + local LAST_OUT=/tmp/tmp.g4s55AdzBT ++ mktemp + local LAST_ERR=/tmp/tmp.ApZ1Lbq4ee + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl -n demand-backup-26315 logs minio-service-86dfccd949-klrtf -c minio + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.g4s55AdzBT + cat /tmp/tmp.ApZ1Lbq4ee + rm /tmp/tmp.g4s55AdzBT /tmp/tmp.ApZ1Lbq4ee + return 0 + echo logs saved in: /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt logs saved in: /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456%23 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 
/tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2JTIz /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-minio-service-86dfccd949-klrtf-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + echo + for p in '$pods' ++ kubectl_bin -n demand-backup-26315 get pod psmdb-client-66f577db5f-6ffbt -o 'jsonpath={.spec.containers[*].name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hMdelsZPXw +++ mktemp ++ local LAST_ERR=/tmp/tmp.FFfOruwjQi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' 
++ set +e ++ kubectl -n demand-backup-26315 get pod psmdb-client-66f577db5f-6ffbt -o 'jsonpath={.spec.containers[*].name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hMdelsZPXw ++ cat /tmp/tmp.FFfOruwjQi ++ rm /tmp/tmp.hMdelsZPXw /tmp/tmp.FFfOruwjQi ++ return 0 + containers=psmdb-client + for c in '$containers' + [[ psmdb-client =~ pmm ]] + kubectl_bin -n demand-backup-26315 logs psmdb-client-66f577db5f-6ffbt -c psmdb-client ++ mktemp + local LAST_OUT=/tmp/tmp.MScls2C5o2 ++ mktemp + local LAST_ERR=/tmp/tmp.eWhN6DLOAy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl -n demand-backup-26315 logs psmdb-client-66f577db5f-6ffbt -c psmdb-client + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MScls2C5o2 + cat /tmp/tmp.eWhN6DLOAy + rm /tmp/tmp.MScls2C5o2 /tmp/tmp.eWhN6DLOAy + return 0 + echo logs saved in: /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt logs saved in: /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456%23 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- 
databaseAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2JTIz /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-psmdb-client-66f577db5f-6ffbt-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + echo + '[' -n psmdb-operator ']' ++ awk -F / '{print $2}' ++ kubectl_bin -n psmdb-operator get pods -o name +++ mktemp ++ local LAST_OUT=/tmp/tmp.mnwHfhhE3D +++ mktemp ++ local LAST_ERR=/tmp/tmp.DsvEgYURJ4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl -n psmdb-operator get pods -o name ++ exit_status=0 ++ set -e ++ '[' 0 
'!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mnwHfhhE3D ++ cat /tmp/tmp.DsvEgYURJ4 ++ rm /tmp/tmp.mnwHfhhE3D /tmp/tmp.DsvEgYURJ4 ++ return 0 + pods=percona-server-mongodb-operator-c4f9b4485-2qnm8 + collect_logs psmdb-operator + local containers + local count + NS=psmdb-operator + for p in '$pods' ++ kubectl_bin -n psmdb-operator get pod percona-server-mongodb-operator-c4f9b4485-2qnm8 -o 'jsonpath={.spec.containers[*].name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X5XsAZ50DM +++ mktemp ++ local LAST_ERR=/tmp/tmp.hYgLdSUlVs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl -n psmdb-operator get pod percona-server-mongodb-operator-c4f9b4485-2qnm8 -o 'jsonpath={.spec.containers[*].name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.X5XsAZ50DM ++ cat /tmp/tmp.hYgLdSUlVs ++ rm /tmp/tmp.X5XsAZ50DM /tmp/tmp.hYgLdSUlVs ++ return 0 + containers=percona-server-mongodb-operator + for c in '$containers' + [[ percona-server-mongodb-operator =~ pmm ]] + kubectl_bin -n psmdb-operator logs percona-server-mongodb-operator-c4f9b4485-2qnm8 -c percona-server-mongodb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.JvzoRmQ6tS ++ mktemp + local LAST_ERR=/tmp/tmp.KlpZ5uQeaz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl -n psmdb-operator logs percona-server-mongodb-operator-c4f9b4485-2qnm8 -c percona-server-mongodb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JvzoRmQ6tS + cat /tmp/tmp.KlpZ5uQeaz + rm /tmp/tmp.JvzoRmQ6tS /tmp/tmp.KlpZ5uQeaz + return 0 + echo logs saved in: /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt logs saved in: /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456%23 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 
/tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2JTIz /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ 
grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.78sjnJyyHF/logs_output-percona-server-mongodb-operator-c4f9b4485-2qnm8-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + echo + destroy demand-backup-26315 + local namespace=demand-backup-26315 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_backups + desc 'Delete psmdb-backup' + set +o xtrace ----------------------------------------------------------------------------------- Delete psmdb-backup ----------------------------------------------------------------------------------- ++ kubectl_bin get psmdb-backup --no-headers ++ wc -l +++ mktemp ++ local LAST_OUT=/tmp/tmp.sr0OdkRzFG +++ mktemp ++ local LAST_ERR=/tmp/tmp.XlZ81AaXLU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup --no-headers ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.sr0OdkRzFG ++ cat /tmp/tmp.XlZ81AaXLU No resources found in demand-backup-26315 namespace. 
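The trace above is the suite's credential-leak scan: each container's logs are saved to a file under the test's temp directory and grepped for every test password, first in plaintext and then in base64, and the run would fail if any count came back non-zero. Below is a minimal sketch of that pattern; the function name, the tmp_dir parameter and the password list are illustrative assumptions based on the values visible in the trace, not the e2e framework's exact helper.

check_passwords_leak() {
    local ns=$1 pod=$2 tmp_dir=$3
    # Plaintext test passwords; the scan above also covers a URL-encoded form of the backup password.
    local passwords="backup123456# backup123456%23 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456"
    # Base64 forms, e.g. backup123456# -> YmFja3VwMTIzNDU2Iw==
    local encoded
    encoded=$(for p in $passwords; do echo -n "$p" | base64; done)

    local containers
    containers=$(kubectl -n "$ns" get pod "$pod" -o jsonpath='{.spec.containers[*].name}')
    for c in $containers; do
        local logfile="$tmp_dir/logs_output-$pod-$c.txt"
        kubectl -n "$ns" logs "$pod" -c "$c" >"$logfile"
        echo "logs saved in: $logfile"
        for pass in $passwords $encoded; do
            local count
            # grep -c exits non-zero when nothing matches, hence the trailing ':' that shows up as '++ :' in the trace.
            count=$(grep -c --fixed-strings -- "$pass" "$logfile" || :)
            if [[ $count != 0 ]]; then
                echo "password '$pass' leaked in $logfile" >&2
                return 1
            fi
        done
    done
}

In this run every count is 0 for the minio, psmdb-client and operator pods, so the scan passes silently.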
++ rm /tmp/tmp.sr0OdkRzFG /tmp/tmp.XlZ81AaXLU ++ return 0 + '[' 0 '!=' 0 ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.xcCQFQc3P3 ++ mktemp + local LAST_ERR=/tmp/tmp.52y7hzKNcQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xcCQFQc3P3 customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.52y7hzKNcQ + rm /tmp/tmp.xcCQFQc3P3 /tmp/tmp.52y7hzKNcQ + return 0 ++ grep -v '\-\-\-' ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/deploy/crd.yaml + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.oeTCtEMozN ++ mktemp + local LAST_ERR=/tmp/tmp.8eFG0OiHha + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oeTCtEMozN + cat /tmp/tmp.8eFG0OiHha + rm /tmp/tmp.oeTCtEMozN /tmp/tmp.8eFG0OiHha + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.QfkDKd4hVA ++ mktemp + local LAST_ERR=/tmp/tmp.Kl3dQymL1d + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.QfkDKd4hVA 
+ cat /tmp/tmp.Kl3dQymL1d + rm /tmp/tmp.QfkDKd4hVA /tmp/tmp.Kl3dQymL1d + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.nN0WGh7fB1 ++ mktemp + local LAST_ERR=/tmp/tmp.4JoCQ0F588 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nN0WGh7fB1 + cat /tmp/tmp.4JoCQ0F588 + rm /tmp/tmp.nN0WGh7fB1 /tmp/tmp.4JoCQ0F588 + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.CXRS3C0b7q ++ mktemp + local LAST_ERR=/tmp/tmp.LCF4EM67Qi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1904/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CXRS3C0b7q clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.LCF4EM67Qi + rm /tmp/tmp.CXRS3C0b7q /tmp/tmp.LCF4EM67Qi + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.7KS5KOo9xE ++ mktemp + local LAST_ERR=/tmp/tmp.R4Q5p8YWSq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.7KS5KOo9xE + cat /tmp/tmp.R4Q5p8YWSq Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.7KS5KOo9xE + cat /tmp/tmp.R4Q5p8YWSq Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not 
found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server 
(NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 4 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat 
/tmp/tmp.7KS5KOo9xE + cat /tmp/tmp.R4Q5p8YWSq Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server 
(NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io 
"cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 8 + cat /tmp/tmp.7KS5KOo9xE + cat /tmp/tmp.R4Q5p8YWSq Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io 
"cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": 
deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + rm /tmp/tmp.7KS5KOo9xE /tmp/tmp.R4Q5p8YWSq + return 1 + true + '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace demand-backup-26315 + rm -rf /tmp/tmp.78sjnJyyHF + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.BAT1q80eeo + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.1e7YoZ3qJP ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.4OLTTSAq1l + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.sQiLCFtptT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace demand-backup-26315 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator