Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/logs/demand-backup.log WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.33) and server (1.30) exceeds the supported minor version skew of +/-1 + create_infra demand-backup-13973 + local ns=demand-backup-13973 + [[ 1 == 1 ]] + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.yvjnqDABr3 ++ mktemp + local LAST_ERR=/tmp/tmp.Gm7CAkKHza + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yvjnqDABr3 customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.Gm7CAkKHza + rm /tmp/tmp.yvjnqDABr3 /tmp/tmp.Gm7CAkKHza + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' No resources found + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: resource(s) were provided, but no name was specified + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.DGLQ4PUUB0 ++ mktemp + local LAST_ERR=/tmp/tmp.njlqZ6SOyZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DGLQ4PUUB0 customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com condition met + cat /tmp/tmp.njlqZ6SOyZ + rm /tmp/tmp.DGLQ4PUUB0 /tmp/tmp.njlqZ6SOyZ + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have 
a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.w9kJI5oyOg ++ mktemp + local LAST_ERR=/tmp/tmp.XI7uRxGocd + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.w9kJI5oyOg + cat /tmp/tmp.XI7uRxGocd + rm /tmp/tmp.w9kJI5oyOg /tmp/tmp.XI7uRxGocd + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.gEDivQAadK ++ mktemp + local LAST_ERR=/tmp/tmp.77ij4bbsvT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gEDivQAadK + cat /tmp/tmp.77ij4bbsvT + rm /tmp/tmp.gEDivQAadK /tmp/tmp.77ij4bbsvT + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.26qMvzx3AX ++ mktemp + local LAST_ERR=/tmp/tmp.haNpC3d6oG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.26qMvzx3AX clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.haNpC3d6oG + rm /tmp/tmp.26qMvzx3AX /tmp/tmp.haNpC3d6oG + return 0 + check_crd_for_deletion PR-1912-ab1be45a + local git_tag=PR-1912-ab1be45a ++ yq eval .metadata.name ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1912-ab1be45a/deploy/crd.yaml ++ /usr/bin/sed s/---//g ++ /usr/bin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in '$(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '\''.metadata.name'\'' | $sed '\''s/---//g'\'' | $sed '\'':a;N;$!ba;s/\n/ /g'\'')' ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IHy0HsTxyc +++ mktemp ++ local LAST_ERR=/tmp/tmp.aidjPQNOLF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.IHy0HsTxyc ++ cat /tmp/tmp.aidjPQNOLF Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in '$(seq 
0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.IHy0HsTxyc ++ cat /tmp/tmp.aidjPQNOLF Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.IHy0HsTxyc ++ cat /tmp/tmp.aidjPQNOLF Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.IHy0HsTxyc ++ cat /tmp/tmp.aidjPQNOLF Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.IHy0HsTxyc /tmp/tmp.aidjPQNOLF ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + xargs kubectl delete ns + kubectl_bin delete namespace psmdb-operator --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.rasYUwY23v ++ mktemp + kubectl_bin get ns + local LAST_ERR=/tmp/tmp.xcpMVG4fPk + local exit_status=0 + local timeout=4 ++ mktemp + local LAST_OUT=/tmp/tmp.Kp3BVMP3mf ++ seq 0 2 ++ mktemp + for i in '$(seq 0 2)' + set +e + kubectl delete namespace 
psmdb-operator --ignore-not-found + local LAST_ERR=/tmp/tmp.hPjIe9EtJa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Kp3BVMP3mf + cat /tmp/tmp.hPjIe9EtJa + rm /tmp/tmp.Kp3BVMP3mf /tmp/tmp.hPjIe9EtJa + return 0 namespace "demand-backup-12827" deleted namespace "gke-managed-cim" deleted namespace "gke-managed-system" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rasYUwY23v namespace "psmdb-operator" deleted + cat /tmp/tmp.xcpMVG4fPk + rm /tmp/tmp.rasYUwY23v /tmp/tmp.xcpMVG4fPk + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.IGAp2GHf9c ++ mktemp + local LAST_ERR=/tmp/tmp.Gn5roSnoxJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.IGAp2GHf9c + cat /tmp/tmp.Gn5roSnoxJ + rm /tmp/tmp.IGAp2GHf9c /tmp/tmp.Gn5roSnoxJ + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.i0Z5bFZBRY ++ mktemp + local LAST_ERR=/tmp/tmp.BzqBXLP8D6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.i0Z5bFZBRY namespace/psmdb-operator created + cat /tmp/tmp.BzqBXLP8D6 + rm /tmp/tmp.i0Z5bFZBRY /tmp/tmp.BzqBXLP8D6 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.w8paciQKae +++ mktemp ++ local LAST_ERR=/tmp/tmp.HttBBxF7ND ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.w8paciQKae ++ cat /tmp/tmp.HttBBxF7ND ++ rm /tmp/tmp.w8paciQKae /tmp/tmp.HttBBxF7ND ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1912-ab1be45a-6-cluster10 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.GygQE5ukyO ++ mktemp + local LAST_ERR=/tmp/tmp.2khSiTWdOK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1912-ab1be45a-6-cluster10 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GygQE5ukyO Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1912-ab1be45a-6-cluster10" modified. 
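
The trace above repeats one pattern for every kubectl call: a kubectl_bin wrapper that captures stdout/stderr into mktemp files and retries the command up to three times with a growing sleep before giving up. A minimal sketch of that wrapper, reconstructed only from what the trace shows (the real helper in e2e-tests/functions may differ in details such as where stderr is echoed):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 timeout=4
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            # on success break immediately; on failure back off 0s, 4s, 8s, as seen in the crd/null retries above
            if [ "$exit_status" != 0 -a -n "$1" ]; then
                sleep $((timeout * i))
            else
                break
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm -f "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }

Most cleanup steps pair the call with '|| :' (logged as '+ :'), so expected errors such as "resource(s) were provided, but no name was specified" do not fail the test; callers like check_crd_for_deletion only look at the non-zero return code (the '++ return 1' above).
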
+ cat /tmp/tmp.2khSiTWdOK + rm /tmp/tmp.GygQE5ukyO /tmp/tmp.2khSiTWdOK + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.oc1fyJL7so ++ mktemp + local LAST_ERR=/tmp/tmp.09WZzwCCuz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oc1fyJL7so customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.09WZzwCCuz + rm /tmp/tmp.oc1fyJL7so /tmp/tmp.09WZzwCCuz + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/deploy/cw-rbac.yaml + kubectl_bin apply -n psmdb-operator -f - + sed -e 's^namespace: .*^namespace: psmdb-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.VeHTP92iOS ++ mktemp + local LAST_ERR=/tmp/tmp.uL828H8na2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VeHTP92iOS clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.uL828H8na2 + rm /tmp/tmp.VeHTP92iOS /tmp/tmp.uL828H8na2 + return 0 + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1912-ab1be45a") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/deploy/cw-operator.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.kvJoqn2Qau ++ mktemp + local LAST_ERR=/tmp/tmp.9l4psshI0z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kvJoqn2Qau deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.9l4psshI0z + rm /tmp/tmp.kvJoqn2Qau /tmp/tmp.9l4psshI0z + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.7ZD8i2ATWW +++ mktemp ++ local LAST_ERR=/tmp/tmp.BLg9wTzFLa ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7ZD8i2ATWW ++ cat /tmp/tmp.BLg9wTzFLa ++ rm /tmp/tmp.7ZD8i2ATWW /tmp/tmp.BLg9wTzFLa ++ return 0 + wait_pod percona-server-mongodb-operator-5b4f87c7bd-2qvfk + local pod=percona-server-mongodb-operator-5b4f87c7bd-2qvfk + set +o xtrace waiting for pod/percona-server-mongodb-operator-5b4f87c7bd-2qvfk to be ready.OK + echo 'Print operator info from log' Print operator info from log + grep 'Manager starting up' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.Jr7nIijqGw +++ mktemp ++ local LAST_ERR=/tmp/tmp.nwrXptniro ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Jr7nIijqGw ++ cat /tmp/tmp.nwrXptniro ++ rm /tmp/tmp.Jr7nIijqGw /tmp/tmp.nwrXptniro ++ return 0 + kubectl_bin logs percona-server-mongodb-operator-5b4f87c7bd-2qvfk ++ mktemp + local LAST_OUT=/tmp/tmp.yMdzt9iBrK ++ mktemp + local LAST_ERR=/tmp/tmp.PoPT0u8NXv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs percona-server-mongodb-operator-5b4f87c7bd-2qvfk + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yMdzt9iBrK + cat /tmp/tmp.PoPT0u8NXv + rm /tmp/tmp.yMdzt9iBrK /tmp/tmp.PoPT0u8NXv + return 0 2025-05-21T11:35:36.952Z INFO setup Manager starting up {"gitCommit": "ab1be45a35452d5d20912637f506f22dd6dd4c33", "gitBranch": "PR-1912-ab1be45a", "buildTime": "", "goVersion": "go1.24.3", "os": "linux", "arch": "amd64"} + create_namespace demand-backup-13973 + local namespace=demand-backup-13973 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ awk '{print $1}' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: 
resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl api-resources ++ awk '{print $1}' ++ grep chaos-mesh.org ++ awk '{print $1}' ++ kubectl get crd + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl get clusterrole ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces demand-backup-13973' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces demand-backup-13973 ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + kubectl_bin delete namespace demand-backup-13973 --ignore-not-found + awk '{print$1}' + kubectl_bin get ns + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.Nbohuu8b8i ++ mktemp + local LAST_OUT=/tmp/tmp.7XVVYzHmEA ++ mktemp + local LAST_ERR=/tmp/tmp.WQ6L1zAKwW + local exit_status=0 + local timeout=4 ++ mktemp + local LAST_ERR=/tmp/tmp.ljh1DM4pPQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace demand-backup-13973 --ignore-not-found ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7XVVYzHmEA + cat /tmp/tmp.ljh1DM4pPQ + rm /tmp/tmp.7XVVYzHmEA /tmp/tmp.ljh1DM4pPQ + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Nbohuu8b8i + cat /tmp/tmp.WQ6L1zAKwW + rm /tmp/tmp.Nbohuu8b8i /tmp/tmp.WQ6L1zAKwW + return 0 + kubectl_bin wait --for=delete namespace demand-backup-13973 ++ mktemp + local LAST_OUT=/tmp/tmp.faWeLJOKeg ++ mktemp + local LAST_ERR=/tmp/tmp.065jm0MXom + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace demand-backup-13973 namespace "gke-managed-cim" deleted namespace "gke-managed-system" deleted namespace "gmp-public" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.faWeLJOKeg + cat /tmp/tmp.065jm0MXom namespace "gmp-system" deleted + rm /tmp/tmp.faWeLJOKeg /tmp/tmp.065jm0MXom + return 0 + desc 'create namespace demand-backup-13973' + set +o xtrace ----------------------------------------------------------------------------------- create namespace demand-backup-13973 ----------------------------------------------------------------------------------- + kubectl_bin create namespace demand-backup-13973 ++ mktemp + local 
LAST_OUT=/tmp/tmp.mN2QqrmvoM ++ mktemp + local LAST_ERR=/tmp/tmp.5mXeeCsdwj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace demand-backup-13973 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mN2QqrmvoM namespace/demand-backup-13973 created + cat /tmp/tmp.5mXeeCsdwj + rm /tmp/tmp.mN2QqrmvoM /tmp/tmp.5mXeeCsdwj + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.LJIdDMppOp +++ mktemp ++ local LAST_ERR=/tmp/tmp.7GYCkii5ll ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LJIdDMppOp ++ cat /tmp/tmp.7GYCkii5ll ++ rm /tmp/tmp.LJIdDMppOp /tmp/tmp.7GYCkii5ll ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1912-ab1be45a-6-cluster10 --namespace=demand-backup-13973 ++ mktemp + local LAST_OUT=/tmp/tmp.xfw6RvicmD ++ mktemp + local LAST_ERR=/tmp/tmp.m7fGbsAUXr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1912-ab1be45a-6-cluster10 --namespace=demand-backup-13973 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xfw6RvicmD Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1912-ab1be45a-6-cluster10" modified. + cat /tmp/tmp.m7fGbsAUXr + rm /tmp/tmp.xfw6RvicmD /tmp/tmp.m7fGbsAUXr + return 0 + deploy_minio + desc 'install Minio' + set +o xtrace ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- + helm uninstall minio-service Error: uninstall: Release not loaded: minio-service: release: not found + : + helm repo remove minio "minio" has been removed from your repositories + helm repo add minio https://charts.min.io/ "minio" has been added to your repositories + retry 10 60 helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio + local max=10 + local delay=60 + shift 2 + local n=1 + helm install minio-service --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio NAME: minio-service LAST DEPLOYED: Wed May 21 11:36:06 2025 NAMESPACE: demand-backup-13973 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.demand-backup-13973.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace demand-backup-13973 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. 
kubectl port-forward $POD_NAME 9000 --namespace demand-backup-13973 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace demand-backup-13973 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace demand-backup-13973 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. mc ls minio-service-local ++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gON1K5uImr +++ mktemp ++ local LAST_ERR=/tmp/tmp.3Lb8LIN6cH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gON1K5uImr ++ cat /tmp/tmp.3Lb8LIN6cH ++ rm /tmp/tmp.gON1K5uImr /tmp/tmp.3Lb8LIN6cH ++ return 0 + MINIO_POD=minio-service-86dfccd949-hc9qv + wait_pod minio-service-86dfccd949-hc9qv + local pod=minio-service-86dfccd949-hc9qv + set +o xtrace waiting for pod/minio-service-86dfccd949-hc9qv to be ready.OK + '[' -n psmdb-operator ']' + kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.demand-backup-13973.svc.cluster.local --tcp=9000 ++ mktemp + local LAST_OUT=/tmp/tmp.jDB8GPDy92 ++ mktemp + local LAST_ERR=/tmp/tmp.Jd01bfNmE9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create svc -n psmdb-operator externalname minio-service --external-name=minio-service.demand-backup-13973.svc.cluster.local --tcp=9000 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jDB8GPDy92 service/minio-service created + cat /tmp/tmp.Jd01bfNmE9 + rm /tmp/tmp.jDB8GPDy92 /tmp/tmp.Jd01bfNmE9 + return 0 + create_minio_bucket operator-testing + local bucket=operator-testing + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' ++ mktemp + local LAST_OUT=/tmp/tmp.L1bX2OM7q0 ++ mktemp + local LAST_ERR=/tmp/tmp.m07YKHPL2z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.L1bX2OM7q0 make_bucket: operator-testing pod "aws-cli" deleted + cat /tmp/tmp.m07YKHPL2z + rm /tmp/tmp.L1bX2OM7q0 /tmp/tmp.m07YKHPL2z + return 0 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + cluster=some-name-rs0 + kubectl_bin apply -f 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.P5bbwtEwMq ++ mktemp + local LAST_ERR=/tmp/tmp.fmuOHhAYo8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.P5bbwtEwMq secret/some-users created deployment.apps/psmdb-client created + cat /tmp/tmp.fmuOHhAYo8 + rm /tmp/tmp.P5bbwtEwMq /tmp/tmp.fmuOHhAYo8 + return 0 + apply_s3_storage_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.gaEwWphGUv ++ mktemp + local LAST_ERR=/tmp/tmp.eLefw2FRw2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gaEwWphGUv secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.eLefw2FRw2 + rm /tmp/tmp.gaEwWphGUv /tmp/tmp.eLefw2FRw2 + return 0 + desc 'create first PSMDB cluster some-name-rs0' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster some-name-rs0 ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/some-name-rs0.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/some-name-rs0.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/some-name-rs0.yml ++ mktemp + yq eval '.spec.upgradeOptions.apply="Never"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' + local LAST_OUT=/tmp/tmp.dUdStXBzMl + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' ++ mktemp + local LAST_ERR=/tmp/tmp.Bv5flkpUvp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1912-ab1be45a"' + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dUdStXBzMl perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.Bv5flkpUvp + rm /tmp/tmp.dUdStXBzMl /tmp/tmp.Bv5flkpUvp + return 0 + desc 'check if all 3 Pods started' + set +o xtrace 
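
Before the banner that follows, apply_cluster ran the CR through a chain of yq edits (all visible in the trace above) that pin the images for this PR build and disable automatic upgrades, then piped the result into kubectl_bin apply -f -. A rough sketch of that cat_config pipeline, using only the expressions shown in the trace (the real helper may apply them in a different order or add more):

    cat_config() {
        local cr_yaml=$1
        cat "$cr_yaml" \
            | yq eval '.spec.upgradeOptions.apply="Never"' \
            | yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' \
            | yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1912-ab1be45a"' \
            | yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' \
            | yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"'
    }

    # apply_cluster is then roughly: cat_config "$cr_file" | kubectl_bin apply -f -
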
----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready......OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.....OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kMZ7v24Q7B +++ mktemp ++ local LAST_ERR=/tmp/tmp.hKbLYd4Y6k ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kMZ7v24Q7B ++ cat /tmp/tmp.hKbLYd4Y6k ++ rm /tmp/tmp.kMZ7v24Q7B /tmp/tmp.hKbLYd4Y6k ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready....OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rNLgncHfCj +++ mktemp ++ local LAST_ERR=/tmp/tmp.6sGNOAdkAn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rNLgncHfCj ++ cat /tmp/tmp.6sGNOAdkAn ++ rm /tmp/tmp.rNLgncHfCj /tmp/tmp.6sGNOAdkAn ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness. + desc 'check if service and statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/some-name-rs0 + local resource=statefulset/some-name-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/statefulset_some-name-rs0.yml + local new_result=/tmp/tmp.O5rEQupLX6/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/statefulset_some-name-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. 
| select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("demand-backup-13973", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.YHrGB1s5XL ++ mktemp + local LAST_ERR=/tmp/tmp.GEo8d3j5lE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YHrGB1s5XL + cat /tmp/tmp.GEo8d3j5lE + rm /tmp/tmp.YHrGB1s5XL /tmp/tmp.GEo8d3j5lE + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.O5rEQupLX6/statefulset_some-name-rs0.yml + version_gt 1.22 ++ bc -l ++ echo '1.30 >= 1.22' + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.O5rEQupLX6/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.O5rEQupLX6/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/statefulset_some-name-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/statefulset_some-name-rs0.yml /tmp/tmp.O5rEQupLX6/statefulset_some-name-rs0.yml + desc 'create user' + set +o xtrace ----------------------------------------------------------------------------------- create user ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.bU9LDuJCs7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.R9ajSUpBtY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bU9LDuJCs7 ++ cat /tmp/tmp.R9ajSUpBtY ++ rm /tmp/tmp.bU9LDuJCs7 /tmp/tmp.R9ajSUpBtY ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.cvE9HS1OEy ++ mktemp + local LAST_ERR=/tmp/tmp.1VNJUwbHx7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.cvE9HS1OEy Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("2f22b286-d601-4b0f-bb83-bcf5e16b026e") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.1VNJUwbHx7 + rm /tmp/tmp.cvE9HS1OEy /tmp/tmp.1VNJUwbHx7 + return 0 + sleep 2 + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IYZOgjpWmU +++ mktemp ++ local LAST_ERR=/tmp/tmp.vrHemUL0iB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IYZOgjpWmU ++ cat /tmp/tmp.vrHemUL0iB ++ rm /tmp/tmp.IYZOgjpWmU /tmp/tmp.vrHemUL0iB ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' 
| mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Ro9UzrwakZ ++ mktemp + local LAST_ERR=/tmp/tmp.UrfGcpOCn3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Ro9UzrwakZ Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("12741eed-5c50-4f21-a1a5-d792d857fed1") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.UrfGcpOCn3 + rm /tmp/tmp.Ro9UzrwakZ /tmp/tmp.UrfGcpOCn3 + return 0 + run_mongo 'use myApp\n db.test2.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.test2.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oIme2BCHow +++ mktemp ++ local LAST_ERR=/tmp/tmp.VhUs9RIpzC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oIme2BCHow ++ cat /tmp/tmp.VhUs9RIpzC ++ rm /tmp/tmp.oIme2BCHow /tmp/tmp.VhUs9RIpzC ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Z3mEgSMbQ0 ++ mktemp + local LAST_ERR=/tmp/tmp.w5dOEVGh7o + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Z3mEgSMbQ0 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("a58b1b66-ff7c-4a5f-b4f4-7f2b8cdc90da") } Percona Server for MongoDB server version: 
v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.w5dOEVGh7o + rm /tmp/tmp.Z3mEgSMbQ0 /tmp/tmp.w5dOEVGh7o + return 0 + run_mongo 'use myApp\n db.test3.insert({ x: 100502 })' myApp:myPass@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.test3.insert({ x: 100502 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lCafgMn6dF +++ mktemp ++ local LAST_ERR=/tmp/tmp.75Yw3G9qlw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.lCafgMn6dF ++ cat /tmp/tmp.75Yw3G9qlw ++ rm /tmp/tmp.lCafgMn6dF /tmp/tmp.75Yw3G9qlw ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test3.insert({ x: 100502 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.PSJBdx02xz ++ mktemp + local LAST_ERR=/tmp/tmp.UnyabfUWVb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test3.insert({ x: 100502 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PSJBdx02xz Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("a7128830-0aad-4329-ba21-919a98ff4b5d") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.UnyabfUWVb + rm /tmp/tmp.PSJBdx02xz /tmp/tmp.UnyabfUWVb + return 0 + custom_user_name=test1user + custom_role_name=test1role + run_mongo 'use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})' userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FIdva19auT +++ mktemp ++ local LAST_ERR=/tmp/tmp.irIDUdzPrN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FIdva19auT ++ cat /tmp/tmp.irIDUdzPrN ++ rm 
/tmp/tmp.FIdva19auT /tmp/tmp.irIDUdzPrN ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.cW3KUWKT6g ++ mktemp + local LAST_ERR=/tmp/tmp.TDXd4VC28m + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.createUser({user: "test1user",pwd:"test1pass",roles:[]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.cW3KUWKT6g Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("d50e5582-7149-47d1-b879-9b715837a9c7") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp Successfully added user: { "user" : "test1user", "roles" : [ ] } bye + cat /tmp/tmp.TDXd4VC28m + rm /tmp/tmp.cW3KUWKT6g /tmp/tmp.TDXd4VC28m + return 0 + run_mongo 'use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})' userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VVIOPjnz3J +++ mktemp ++ local LAST_ERR=/tmp/tmp.X1cIykyKEu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VVIOPjnz3J ++ cat /tmp/tmp.X1cIykyKEu ++ rm /tmp/tmp.VVIOPjnz3J /tmp/tmp.X1cIykyKEu ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.V7WIKSjcFC ++ mktemp + local LAST_ERR=/tmp/tmp.yFQlgyE2Td + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.createRole({"role":"test1role", privileges:[],roles:[]})\n'\'' | mongo 
mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.V7WIKSjcFC Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("1cab586c-5f0e-400a-8ec6-9f54b9618a24") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp { "role" : "test1role", "privileges" : [ ], "roles" : [ ] } bye + cat /tmp/tmp.yFQlgyE2Td + rm /tmp/tmp.V7WIKSjcFC /tmp/tmp.yFQlgyE2Td + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:38:35+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cwQftp6RlF +++ mktemp ++ local LAST_ERR=/tmp/tmp.MdyghqPr1f ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cwQftp6RlF ++ cat /tmp/tmp.MdyghqPr1f ++ rm /tmp/tmp.cwQftp6RlF /tmp/tmp.MdyghqPr1f ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.HFmCECdweW ++ mktemp + local LAST_ERR=/tmp/tmp.sWqBhxIcem + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.HFmCECdweW + cat /tmp/tmp.sWqBhxIcem + rm /tmp/tmp.HFmCECdweW /tmp/tmp.sWqBhxIcem + return 0 + [[ 0 
-eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:38:38+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dBcfLRD99Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.u0JTsWxoIN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dBcfLRD99Y ++ cat /tmp/tmp.u0JTsWxoIN ++ rm /tmp/tmp.dBcfLRD99Y /tmp/tmp.u0JTsWxoIN ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.anbYMwkwyp ++ mktemp + local LAST_ERR=/tmp/tmp.2ZxJ3I13WD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.anbYMwkwyp + cat /tmp/tmp.2ZxJ3I13WD + rm /tmp/tmp.anbYMwkwyp /tmp/tmp.2ZxJ3I13WD + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
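Each compare_mongo_cmd call above runs db.test.find() against a single replica-set member, strips shell noise (network log lines, the session banner, the version-mismatch warning) with egrep -v, masks ObjectId values and the random namespace suffix with sed, and diffs the result against a stored find.json. The same normalize-and-compare step in isolation, assuming client_pod was looked up as in the previous sketch and with the expected-output path left as a placeholder:

expected=/path/to/compare/find.json   # stored expected output (placeholder path)
actual=$(mktemp)

kubectl exec "$client_pod" -- bash -c \
  "printf 'use myApp\n db.test.find()\n' | mongo \"mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false&replicaSet=rs0\"" \
  | egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match' \
  | sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' \
  > "$actual"

diff -u "$expected" "$actual"   # a non-empty diff (non-zero exit) fails the test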
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:38:43+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DxJQa3MToD +++ mktemp ++ local LAST_ERR=/tmp/tmp.G4vCNMsB9Y ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DxJQa3MToD ++ cat /tmp/tmp.G4vCNMsB9Y ++ rm /tmp/tmp.DxJQa3MToD /tmp/tmp.G4vCNMsB9Y ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.SWiMoXKyeo ++ mktemp + local LAST_ERR=/tmp/tmp.UkxDqmXw6t + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SWiMoXKyeo + cat /tmp/tmp.UkxDqmXw6t + rm /tmp/tmp.SWiMoXKyeo /tmp/tmp.UkxDqmXw6t + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + wait_backup_agent some-name-rs0-0 + local agent_pod=some-name-rs0-0 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-0...2025-05-21T11:38:10.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-1 + local agent_pod=some-name-rs0-1 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-1...2025-05-21T11:38:08.000+0000 I listening for the commands + wait_backup_agent some-name-rs0-2 + local agent_pod=some-name-rs0-2 + set +o xtrace waiting for pbm-agent to be ready in some-name-rs0-2...2025-05-21T11:38:14.000+0000 I listening for the commands + backup_name_minio=backup-minio + '[' -z '' ']' + backup_name_aws=backup-aws-s3 + backup_name_gcp=backup-gcp-cs + backup_name_azure=backup-azure-blob + desc 'run backups' + set +o xtrace ----------------------------------------------------------------------------------- run backups ----------------------------------------------------------------------------------- + '[' -z '' ']' + run_backup aws-s3 + local storage=aws-s3 + local backup_name=backup-aws-s3 + local type=logical + desc 'run backup backup-aws-s3' + set +o xtrace 
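Before any backup is requested, the test waits until the pbm-agent sidecar in every mongod pod reports that it is listening for commands; the trace shows the matching log line and its timestamp being echoed. One way to poll for that, assuming the sidecar container is named backup-agent (the container name is not visible in this trace):

wait_backup_agent() {
  local pod="$1"
  echo -n "waiting for pbm-agent to be ready in ${pod}"
  # "backup-agent" is the assumed sidecar container name.
  until kubectl logs "$pod" -c backup-agent --tail=50 2>/dev/null \
        | grep -q 'listening for the commands'; do
    echo -n .
    sleep 5
  done
  echo
}

for pod in some-name-rs0-0 some-name-rs0-1 some-name-rs0-2; do
  wait_backup_agent "$pod"
done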
----------------------------------------------------------------------------------- run backup backup-aws-s3 ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-aws-s3" | .spec.storageName = "aws-s3" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/backup-aws-s3.yml ++ mktemp + local LAST_OUT=/tmp/tmp.rrnQvaijyy ++ mktemp + local LAST_ERR=/tmp/tmp.v7aUFYR6aS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rrnQvaijyy perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created + cat /tmp/tmp.v7aUFYR6aS + rm /tmp/tmp.rrnQvaijyy /tmp/tmp.v7aUFYR6aS + return 0 + run_backup gcp-cs + local storage=gcp-cs + local backup_name=backup-gcp-cs + local type=logical + desc 'run backup backup-gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-gcp-cs ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-gcp-cs" | .spec.storageName = "gcp-cs" ++ mktemp | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/backup-gcp-cs.yml + local LAST_OUT=/tmp/tmp.ywHzCyS2WG ++ mktemp + local LAST_ERR=/tmp/tmp.zRTbJAFItq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ywHzCyS2WG perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs created + cat /tmp/tmp.zRTbJAFItq + rm /tmp/tmp.ywHzCyS2WG /tmp/tmp.zRTbJAFItq + return 0 + run_backup azure-blob + local storage=azure-blob + local backup_name=backup-azure-blob + local type=logical + desc 'run backup backup-azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-azure-blob ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-azure-blob" | .spec.storageName = "azure-blob" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/backup-azure-blob.yml ++ mktemp + local LAST_OUT=/tmp/tmp.bEtfcJkbIs ++ mktemp + local LAST_ERR=/tmp/tmp.YYbchF8YeZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bEtfcJkbIs perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created + cat /tmp/tmp.YYbchF8YeZ + rm /tmp/tmp.bEtfcJkbIs /tmp/tmp.YYbchF8YeZ + return 0 + run_backup minio + local storage=minio + local backup_name=backup-minio + local type=logical + desc 'run backup backup-minio' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-minio" | .spec.storageName = "minio" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/backup-minio.yml ++ mktemp + local LAST_OUT=/tmp/tmp.kcKAf0iOja ++ mktemp + local LAST_ERR=/tmp/tmp.BdkSIXLhOw + local 
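Each run_backup call above takes a per-storage template from e2e-tests/demand-backup/conf/, rewrites the backup name, storageName and type with yq (v4 syntax, as in the trace), and pipes the result to kubectl apply. The same step reduced to a function, with the conf path written relative to the repository checkout:

run_backup() {
  local storage="$1"
  local backup_name="backup-${storage}"
  local type="${2:-logical}"

  yq eval "
    .metadata.name = \"${backup_name}\" |
    .spec.storageName = \"${storage}\" |
    .spec.type = \"${type}\"
  " "e2e-tests/demand-backup/conf/backup-${storage}.yml" \
    | kubectl apply -f -
}

run_backup aws-s3
run_backup gcp-cs
run_backup azure-blob
run_backup minio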
exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kcKAf0iOja perconaservermongodbbackup.psmdb.percona.com/backup-minio created + cat /tmp/tmp.BdkSIXLhOw + rm /tmp/tmp.kcKAf0iOja /tmp/tmp.BdkSIXLhOw + return 0 + '[' -z '' ']' + wait_backup backup-aws-s3 + local backup_name=backup-aws-s3 + local target_state=ready + set +o xtrace waiting for backup-aws-s3 to reach ready state. + wait_backup backup-gcp-cs + local backup_name=backup-gcp-cs + local target_state=ready + set +o xtrace waiting for backup-gcp-cs to reach ready state......... + wait_backup backup-azure-blob + local backup_name=backup-azure-blob + local target_state=ready + set +o xtrace waiting for backup-azure-blob to reach ready state. + wait_backup backup-minio + local backup_name=backup-minio + local target_state=ready + set +o xtrace waiting for backup-minio to reach ready state.... + sleep 5 + '[' -z '' ']' + desc 'check backup and restore -- aws-s3' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- aws-s3 ----------------------------------------------------------------------------------- ++ get_backup_dest backup-aws-s3 ++ local backup_name=backup-aws-s3 ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ sed 's|azure://||' ++ kubectl_bin get psmdb-backup backup-aws-s3 -o 'jsonpath={.status.destination}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0qpK22fQzN +++ mktemp ++ local LAST_ERR=/tmp/tmp.GZg9XTwvhI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-aws-s3 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0qpK22fQzN ++ cat /tmp/tmp.GZg9XTwvhI ++ rm /tmp/tmp.0qpK22fQzN /tmp/tmp.GZg9XTwvhI ++ return 0 + backup_dest_aws=operator-testing/psmdb-demand-backup/2025-05-21T11:38:53Z + curl -s https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-21T11:38:53Z/rs0/myApp.test.gz + gunzip + run_recovery_check backup-aws-s3 some-name-rs0 + local backup=backup-aws-s3 + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CSiNVwO0Gv +++ mktemp ++ local LAST_ERR=/tmp/tmp.lAhpdzjeR4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CSiNVwO0Gv ++ cat /tmp/tmp.lAhpdzjeR4 ++ rm /tmp/tmp.CSiNVwO0Gv /tmp/tmp.lAhpdzjeR4 ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Gi0iGtaOFy ++ mktemp + local LAST_ERR=/tmp/tmp.TPzj3ypAOI + local 
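wait_backup above prints a dot while the psmdb-backup object works its way to the ready state. The trace does not show which field the helper polls; a plain polling loop against .status.state, with an added timeout as a safety net (the retry limit is arbitrary), behaves the same way:

wait_backup() {
  local backup_name="$1" target_state="${2:-ready}" retries=0
  echo -n "waiting for ${backup_name} to reach ${target_state} state"
  until [[ $(kubectl get psmdb-backup "$backup_name" \
             -o 'jsonpath={.status.state}' 2>/dev/null) == "$target_state" ]]; do
    echo -n .
    sleep 5
    (( ++retries > 120 )) && { echo " timed out"; return 1; }
  done
  echo
}

for b in backup-aws-s3 backup-gcp-cs backup-azure-blob backup-minio; do
  wait_backup "$b"
done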
exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Gi0iGtaOFy Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("60ddc01c-2ab8-4715-8b33-4247e1cea684") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.TPzj3ypAOI + rm /tmp/tmp.Gi0iGtaOFy /tmp/tmp.TPzj3ypAOI + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-13973 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:39:50+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-13973 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local LAST_OUT=/tmp/tmp.0Kr2VBxrdK +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZILKtjJXHH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0Kr2VBxrdK ++ cat /tmp/tmp.ZILKtjJXHH ++ rm /tmp/tmp.0Kr2VBxrdK /tmp/tmp.ZILKtjJXHH ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.B2cPN1QVyO ++ mktemp + local LAST_ERR=/tmp/tmp.NT28Td8Cy8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.B2cPN1QVyO + cat 
/tmp/tmp.NT28Td8Cy8 + rm /tmp/tmp.B2cPN1QVyO /tmp/tmp.NT28Td8Cy8 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.O5rEQupLX6/find-2nd + run_restore backup-aws-s3 + local backup_name=backup-aws-s3 + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/restore.yml + /usr/bin/sed -e 's/name:/name: restore-backup-aws-s3/' + /usr/bin/sed -e 's/backupName:/backupName: backup-aws-s3/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.VItPcxsKIH ++ mktemp + local LAST_ERR=/tmp/tmp.CzVuuWwtMb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VItPcxsKIH perconaservermongodbrestore.psmdb.percona.com/restore-backup-aws-s3 created + cat /tmp/tmp.CzVuuWwtMb + rm /tmp/tmp.VItPcxsKIH /tmp/tmp.CzVuuWwtMb + return 0 + wait_restore backup-aws-s3 some-name + local backup_name=backup-aws-s3 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-aws-s3 object to be createdOK Waiting psmdb-restore/restore-backup-aws-s3 to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9Qcu7Mp0D6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8ujEDq38zt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9Qcu7Mp0D6 ++ cat /tmp/tmp.8ujEDq38zt ++ rm /tmp/tmp.9Qcu7Mp0D6 /tmp/tmp.8ujEDq38zt ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
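The restore above is produced by sed-patching a generic restore.yml with the restore and backup names, applying it, then waiting first for the PerconaServerMongoDBRestore object to reach "ready" and then for the psmdb resource itself to report ready again. A condensed sketch of that sequence (paths relative to the checkout, polling intervals illustrative, and .status.state assumed as the polled field):

run_restore() {
  local backup_name="$1"
  sed -e "s/name:/name: restore-${backup_name}/" \
      -e "s/backupName:/backupName: ${backup_name}/" \
      e2e-tests/demand-backup/conf/restore.yml \
    | kubectl apply -f -
}

wait_restore() {
  local backup_name="$1"
  until [[ $(kubectl get psmdb-restore "restore-${backup_name}" \
             -o 'jsonpath={.status.state}' 2>/dev/null) == "ready" ]]; do
    sleep 5
  done
}

wait_cluster_consistency() {
  local cluster="$1"
  until [[ $(kubectl get psmdb "$cluster" \
             -o 'jsonpath={.status.state}' 2>/dev/null) == "ready" ]]; do
    sleep 7
  done
}

run_restore backup-aws-s3
wait_restore backup-aws-s3
wait_cluster_consistency some-name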
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:40:16+0000] running db.test.find() in myApp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R1HBsZ6Jq1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8LoAlhC4zF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.R1HBsZ6Jq1 ++ cat /tmp/tmp.8LoAlhC4zF ++ rm /tmp/tmp.R1HBsZ6Jq1 /tmp/tmp.8LoAlhC4zF ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.z7uEUoQr0M ++ mktemp + local LAST_ERR=/tmp/tmp.6mQvHcVDfQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.z7uEUoQr0M + cat /tmp/tmp.6mQvHcVDfQ + rm /tmp/tmp.z7uEUoQr0M /tmp/tmp.6mQvHcVDfQ + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:40:20+0000] running db.test.find() in myApp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PsJltuhCH5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.n6MuhgJ3Ce ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PsJltuhCH5 ++ cat /tmp/tmp.n6MuhgJ3Ce ++ rm /tmp/tmp.PsJltuhCH5 /tmp/tmp.n6MuhgJ3Ce ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.OLhBkVNIsj ++ mktemp + local LAST_ERR=/tmp/tmp.sny24Z0TMD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.OLhBkVNIsj + cat /tmp/tmp.sny24Z0TMD + rm /tmp/tmp.OLhBkVNIsj /tmp/tmp.sny24Z0TMD + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:40:23+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.scTfLcpsrN +++ mktemp ++ local LAST_ERR=/tmp/tmp.FlV6Nw5woi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.scTfLcpsrN ++ cat /tmp/tmp.FlV6Nw5woi ++ rm /tmp/tmp.scTfLcpsrN /tmp/tmp.FlV6Nw5woi ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.XgOQJ59LMN ++ mktemp + local LAST_ERR=/tmp/tmp.NT3Oe1qCKK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XgOQJ59LMN + cat /tmp/tmp.NT3Oe1qCKK + rm /tmp/tmp.XgOQJ59LMN /tmp/tmp.NT3Oe1qCKK + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + desc 'check backup and restore -- gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- gcp-cs ----------------------------------------------------------------------------------- ++ get_backup_dest backup-gcp-cs ++ local backup_name=backup-gcp-cs ++ kubectl_bin get psmdb-backup backup-gcp-cs -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ sed 's|azure://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eljmPyxRIo +++ mktemp ++ local LAST_ERR=/tmp/tmp.HuXJuTN5pM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-gcp-cs -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eljmPyxRIo ++ cat /tmp/tmp.HuXJuTN5pM ++ rm /tmp/tmp.eljmPyxRIo /tmp/tmp.HuXJuTN5pM ++ return 0 + backup_dest_gcp=operator-testing/psmdb-demand-backup/2025-05-21T11:39:16Z + gunzip + curl -s https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-21T11:39:16Z/rs0/myApp.test.gz + run_recovery_check backup-gcp-cs some-name-rs0 + 
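get_backup_dest above reads .status.destination from the psmdb-backup object and strips the s3:// or azure:// scheme; the test then downloads rs0/myApp.test.gz from the public bucket URL and gunzips it, proving the dump actually reached the remote storage. The same check for the aws-s3 and gcp-cs cases seen here (the destination value is whatever the backup status reports at runtime):

get_backup_dest() {
  kubectl get psmdb-backup "$1" -o 'jsonpath={.status.destination}' \
    | sed -e 's/.json$//' -e 's|s3://||' -e 's|azure://||'
}

# e.g. operator-testing/psmdb-demand-backup/<timestamp>
dest_aws=$(get_backup_dest backup-aws-s3)
dest_gcp=$(get_backup_dest backup-gcp-cs)

# A successful gunzip shows the object exists and is a valid dump archive.
curl -s "https://s3.amazonaws.com/${dest_aws}/rs0/myApp.test.gz" | gunzip >/dev/null
curl -s "https://storage.googleapis.com/${dest_gcp}/rs0/myApp.test.gz" | gunzip >/dev/null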
local backup=backup-gcp-cs + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y1nNuASKtV +++ mktemp ++ local LAST_ERR=/tmp/tmp.jRzLfGD0wi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.y1nNuASKtV ++ cat /tmp/tmp.jRzLfGD0wi ++ rm /tmp/tmp.y1nNuASKtV /tmp/tmp.jRzLfGD0wi ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.9JmjGYYheT ++ mktemp + local LAST_ERR=/tmp/tmp.NI4F0aI6jL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9JmjGYYheT Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("b85f5a8d-1dc4-49f4-b325-dbf5c6684e84") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.NI4F0aI6jL + rm /tmp/tmp.9JmjGYYheT /tmp/tmp.NI4F0aI6jL + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-13973 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:40:32+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-13973 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.N81qwp3GN9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.RaKdPS29zt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.N81qwp3GN9 ++ cat /tmp/tmp.RaKdPS29zt ++ rm /tmp/tmp.N81qwp3GN9 /tmp/tmp.RaKdPS29zt ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.IBAVnftflK ++ mktemp + local LAST_ERR=/tmp/tmp.yiBFraABCK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.IBAVnftflK + cat /tmp/tmp.yiBFraABCK + rm /tmp/tmp.IBAVnftflK /tmp/tmp.yiBFraABCK + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.O5rEQupLX6/find-2nd + run_restore backup-gcp-cs + local backup_name=backup-gcp-cs + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/restore.yml + /usr/bin/sed -e 's/backupName:/backupName: backup-gcp-cs/' + /usr/bin/sed -e 's/name:/name: restore-backup-gcp-cs/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.xvEzC37Xr2 ++ mktemp + local LAST_ERR=/tmp/tmp.4icELbQBUr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xvEzC37Xr2 perconaservermongodbrestore.psmdb.percona.com/restore-backup-gcp-cs created + cat /tmp/tmp.4icELbQBUr + rm /tmp/tmp.xvEzC37Xr2 /tmp/tmp.4icELbQBUr + return 0 + wait_restore backup-gcp-cs some-name + local backup_name=backup-gcp-cs + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-gcp-cs object to be createdOK Waiting psmdb-restore/restore-backup-gcp-cs to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + 
retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6q0D9pZFub +++ mktemp ++ local LAST_ERR=/tmp/tmp.FP133eTdn8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6q0D9pZFub ++ cat /tmp/tmp.FP133eTdn8 ++ rm /tmp/tmp.6q0D9pZFub /tmp/tmp.FP133eTdn8 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:40:55+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 mongodb '' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.alLhT2hXaN +++ mktemp ++ local LAST_ERR=/tmp/tmp.ILJ3AHHI9e ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.alLhT2hXaN ++ cat /tmp/tmp.ILJ3AHHI9e ++ rm /tmp/tmp.alLhT2hXaN /tmp/tmp.ILJ3AHHI9e ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.ubDBQkbNqb ++ mktemp + local LAST_ERR=/tmp/tmp.WJqqcxFvCp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ubDBQkbNqb + cat /tmp/tmp.WJqqcxFvCp + rm /tmp/tmp.ubDBQkbNqb /tmp/tmp.WJqqcxFvCp + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + for i in '$(seq 0 $last_pod)' + 
compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:40:58+0000] running db.test.find() in myApp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.21vSZUIJok +++ mktemp ++ local LAST_ERR=/tmp/tmp.X25cD3eXto ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.21vSZUIJok ++ cat /tmp/tmp.X25cD3eXto ++ rm /tmp/tmp.21vSZUIJok /tmp/tmp.X25cD3eXto ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.C0O6KwlXwe ++ mktemp + local LAST_ERR=/tmp/tmp.mdX60Z0nDl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.C0O6KwlXwe + cat /tmp/tmp.mdX60Z0nDl + rm /tmp/tmp.C0O6KwlXwe /tmp/tmp.mdX60Z0nDl + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:41:01+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.ndQa6Mcvzi +++ mktemp ++ local LAST_ERR=/tmp/tmp.rs2kcdZcSS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ndQa6Mcvzi ++ cat /tmp/tmp.rs2kcdZcSS ++ rm /tmp/tmp.ndQa6Mcvzi /tmp/tmp.rs2kcdZcSS ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.bz16Cps5QN ++ mktemp + local LAST_ERR=/tmp/tmp.hvP10q7dYd + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bz16Cps5QN + cat /tmp/tmp.hvP10q7dYd + rm /tmp/tmp.bz16Cps5QN /tmp/tmp.hvP10q7dYd + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + desc 'check backup and restore -- azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- azure-blob ----------------------------------------------------------------------------------- ++ get_backup_dest backup-azure-blob ++ local backup_name=backup-azure-blob ++ kubectl_bin get psmdb-backup backup-azure-blob -o 'jsonpath={.status.destination}' ++ sed 's|s3://||' +++ mktemp ++ sed -e 's/.json$//' ++ sed 's|azure://||' ++ local LAST_OUT=/tmp/tmp.x9dqSiWcsf +++ mktemp ++ local LAST_ERR=/tmp/tmp.bBvonaPz2x ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-azure-blob -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.x9dqSiWcsf ++ cat /tmp/tmp.bBvonaPz2x ++ rm /tmp/tmp.x9dqSiWcsf /tmp/tmp.bBvonaPz2x ++ return 0 + backup_dest_azure=https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-21T11:39:04Z + curl -s 
https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-21T11:39:04Z/rs0/myApp.test.gz + gunzip + run_recovery_check backup-azure-blob some-name-rs0 + local backup=backup-azure-blob + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6cI39qjXbm +++ mktemp ++ local LAST_ERR=/tmp/tmp.e0mIxwcrDq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6cI39qjXbm ++ cat /tmp/tmp.e0mIxwcrDq ++ rm /tmp/tmp.6cI39qjXbm /tmp/tmp.e0mIxwcrDq ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.8RHZnr3Gck ++ mktemp + local LAST_ERR=/tmp/tmp.vF0flxidxh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8RHZnr3Gck Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("e3471238-83e4-4402-bb39-78caaa371889") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.vF0flxidxh + rm /tmp/tmp.8RHZnr3Gck /tmp/tmp.vF0flxidxh + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-13973 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:41:06+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-13973 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.V2sS8Vkvll +++ mktemp ++ local LAST_ERR=/tmp/tmp.ht8yuFXfL3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.V2sS8Vkvll ++ cat /tmp/tmp.ht8yuFXfL3 ++ rm /tmp/tmp.V2sS8Vkvll /tmp/tmp.ht8yuFXfL3 ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.sBV8D2rwY6 ++ mktemp + local LAST_ERR=/tmp/tmp.8ruoBOAYuO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sBV8D2rwY6 + cat /tmp/tmp.8ruoBOAYuO + rm /tmp/tmp.sBV8D2rwY6 /tmp/tmp.8ruoBOAYuO + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.O5rEQupLX6/find-2nd + run_restore backup-azure-blob + local backup_name=backup-azure-blob + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/restore.yml + /usr/bin/sed -e 's/name:/name: restore-backup-azure-blob/' + /usr/bin/sed -e 's/backupName:/backupName: backup-azure-blob/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.2LWXV9uqf0 ++ mktemp + local LAST_ERR=/tmp/tmp.hMpJVNEsRp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2LWXV9uqf0 perconaservermongodbrestore.psmdb.percona.com/restore-backup-azure-blob created + cat /tmp/tmp.hMpJVNEsRp + rm /tmp/tmp.2LWXV9uqf0 /tmp/tmp.hMpJVNEsRp + return 0 + wait_restore backup-azure-blob some-name + local backup_name=backup-azure-blob + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-azure-blob object to be createdOK Waiting psmdb-restore/restore-backup-azure-blob to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local 
cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ggOeM4Ed0z +++ mktemp ++ local LAST_ERR=/tmp/tmp.mv2pifF2mu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ggOeM4Ed0z ++ cat /tmp/tmp.mv2pifF2mu ++ rm /tmp/tmp.ggOeM4Ed0z /tmp/tmp.mv2pifF2mu ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:41:30+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BhGE5oMqoY +++ mktemp ++ local LAST_ERR=/tmp/tmp.FvROA1kiyK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BhGE5oMqoY ++ cat /tmp/tmp.FvROA1kiyK ++ rm /tmp/tmp.BhGE5oMqoY /tmp/tmp.FvROA1kiyK ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.6Li75etAG7 ++ mktemp + local LAST_ERR=/tmp/tmp.mtrakslNHV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6Li75etAG7 + cat /tmp/tmp.mtrakslNHV + rm /tmp/tmp.6Li75etAG7 /tmp/tmp.mtrakslNHV + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find 
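Every kubectl invocation in this log, including the status poll just above, goes through the same wrapper: stdout and stderr are captured into mktemp files, the command is retried up to three times on failure, and both captures are printed before the temp files are removed. A reduced version of that wrapper; the retry sleep is reconstructed from the visible timeout variable, not taken from the helper's source:

kubectl_bin() {
  local out err exit_status=1 timeout=4
  out=$(mktemp)
  err=$(mktemp)
  for i in $(seq 0 2); do
    set +e
    kubectl "$@" >"$out" 2>"$err"
    exit_status=$?
    set -e
    [ "$exit_status" -eq 0 ] && break
    # Back off a little longer on each failed attempt (assumed behaviour).
    sleep "$((timeout * (i + 1)))"
  done
  cat "$out"
  cat "$err"
  rm -f "$out" "$err"
  return "$exit_status"
}

kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'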
+ for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:41:32+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UrDzR0hwS1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tDcHR5cidX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UrDzR0hwS1 ++ cat /tmp/tmp.tDcHR5cidX ++ rm /tmp/tmp.UrDzR0hwS1 /tmp/tmp.tDcHR5cidX ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.QSt928b3pV ++ mktemp + local LAST_ERR=/tmp/tmp.CRtURgUWH6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.QSt928b3pV + cat /tmp/tmp.CRtURgUWH6 + rm /tmp/tmp.QSt928b3pV /tmp/tmp.CRtURgUWH6 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:41:35+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fZNsl2amGH +++ mktemp ++ local LAST_ERR=/tmp/tmp.67o7YeXVYN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fZNsl2amGH ++ cat /tmp/tmp.67o7YeXVYN ++ rm /tmp/tmp.fZNsl2amGH /tmp/tmp.67o7YeXVYN ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.fndg4OJXz7 ++ mktemp + local LAST_ERR=/tmp/tmp.wjnUezfcKw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fndg4OJXz7 + cat /tmp/tmp.wjnUezfcKw + rm /tmp/tmp.fndg4OJXz7 /tmp/tmp.wjnUezfcKw + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + desc 'check backup and restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- minio ----------------------------------------------------------------------------------- ++ get_backup_dest backup-minio ++ local backup_name=backup-minio ++ sed 's|azure://||' ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.U3OcFBHasF +++ mktemp ++ local LAST_ERR=/tmp/tmp.1kt6sVITsh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.U3OcFBHasF ++ cat /tmp/tmp.1kt6sVITsh ++ rm /tmp/tmp.U3OcFBHasF /tmp/tmp.1kt6sVITsh ++ return 0 + backup_dest_minio=operator-testing/2025-05-21T11:39:28Z + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws 
--endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2025-05-21T11:39:28Z/rs0/ + grep myApp.test.gz ++ mktemp + local LAST_OUT=/tmp/tmp.WGYOXMGArV ++ mktemp + local LAST_ERR=/tmp/tmp.o2cYu00Gto + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2025-05-21T11:39:28Z/rs0/ + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.WGYOXMGArV + cat /tmp/tmp.o2cYu00Gto If you don't see a command prompt, try pressing enter. warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-13973 + rm /tmp/tmp.WGYOXMGArV /tmp/tmp.o2cYu00Gto + return 0 2025-05-21 11:39:31 56 myApp.test.gz + run_recovery_check backup-minio some-name-rs0 + local backup=backup-minio + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JxjBiRhLl5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.vgDaJFSLme ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JxjBiRhLl5 ++ cat /tmp/tmp.vgDaJFSLme ++ rm /tmp/tmp.JxjBiRhLl5 /tmp/tmp.vgDaJFSLme ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.jxtyWHxcn5 ++ mktemp + local LAST_ERR=/tmp/tmp.Jq2oT8vYZf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jxtyWHxcn5 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("c8ac9038-4726-412e-93c5-f2bb4c414c85") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.Jq2oT8vYZf + rm /tmp/tmp.jxtyWHxcn5 
/tmp/tmp.Jq2oT8vYZf + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-13973 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:41:51+0000] running db.test.find() in myApp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-13973 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hsiLpn4fJF +++ mktemp ++ local LAST_ERR=/tmp/tmp.gGwTk9QqWE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hsiLpn4fJF ++ cat /tmp/tmp.gGwTk9QqWE ++ rm /tmp/tmp.hsiLpn4fJF /tmp/tmp.gGwTk9QqWE ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.7QBaH3HqNg ++ mktemp + local LAST_ERR=/tmp/tmp.oukHqTLFhV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7QBaH3HqNg + cat /tmp/tmp.oukHqTLFhV + rm /tmp/tmp.7QBaH3HqNg /tmp/tmp.oukHqTLFhV + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.O5rEQupLX6/find-2nd + run_restore backup-minio + local backup_name=backup-minio + /usr/bin/sed -e 's/backupName:/backupName: backup-minio/' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/restore.yml + kubectl_bin apply -f - + /usr/bin/sed -e 's/name:/name: restore-backup-minio/' ++ mktemp + local LAST_OUT=/tmp/tmp.k1LHf0wWtP ++ mktemp + local LAST_ERR=/tmp/tmp.r5wNcRX7rC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.k1LHf0wWtP perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio created + cat /tmp/tmp.r5wNcRX7rC + rm /tmp/tmp.k1LHf0wWtP /tmp/tmp.r5wNcRX7rC + return 0 + wait_restore backup-minio some-name + local backup_name=backup-minio + local cluster_name=some-name + local 
target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio object to be createdOK Waiting psmdb-restore/restore-backup-minio to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RcTin5T5nR +++ mktemp ++ local LAST_ERR=/tmp/tmp.J8uH7FjTbG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RcTin5T5nR ++ cat /tmp/tmp.J8uH7FjTbG ++ rm /tmp/tmp.RcTin5T5nR /tmp/tmp.J8uH7FjTbG ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:42:18+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OT20kuWNKe +++ mktemp ++ local LAST_ERR=/tmp/tmp.d4efYQqt1v ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.OT20kuWNKe ++ cat /tmp/tmp.d4efYQqt1v ++ rm /tmp/tmp.OT20kuWNKe /tmp/tmp.d4efYQqt1v ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Uv3fAPQHUB ++ mktemp + local LAST_ERR=/tmp/tmp.n0G2K6zdS5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Uv3fAPQHUB + cat /tmp/tmp.n0G2K6zdS5 + rm /tmp/tmp.Uv3fAPQHUB /tmp/tmp.n0G2K6zdS5 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! -z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:42:21+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yML368tK1m +++ mktemp ++ local LAST_ERR=/tmp/tmp.eKU3xb1c9Q ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yML368tK1m ++ cat /tmp/tmp.eKU3xb1c9Q ++ rm /tmp/tmp.yML368tK1m /tmp/tmp.eKU3xb1c9Q ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.H4dRZtKTFY ++ mktemp + local LAST_ERR=/tmp/tmp.1e36aAxalY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.H4dRZtKTFY + cat /tmp/tmp.1e36aAxalY + rm /tmp/tmp.H4dRZtKTFY /tmp/tmp.1e36aAxalY + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:42:25+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SVdcbOKWnQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.HiHnm6UtFU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SVdcbOKWnQ ++ cat /tmp/tmp.HiHnm6UtFU ++ rm /tmp/tmp.SVdcbOKWnQ /tmp/tmp.HiHnm6UtFU ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.mr1Ke56uIo ++ mktemp + local LAST_ERR=/tmp/tmp.lZMDNd4f4S + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mr1Ke56uIo + cat /tmp/tmp.lZMDNd4f4S + rm /tmp/tmp.mr1Ke56uIo /tmp/tmp.lZMDNd4f4S + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + run_mongo 'use myApp\n db.dropUser("test1user")' userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.dropUser("test1user")' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bmwo67yLvY +++ mktemp ++ local LAST_ERR=/tmp/tmp.io53tQivxU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bmwo67yLvY ++ cat /tmp/tmp.io53tQivxU ++ rm /tmp/tmp.bmwo67yLvY /tmp/tmp.io53tQivxU ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.dropUser("test1user")\n'\'' | mongo 
mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.mpTmTroYwa ++ mktemp + local LAST_ERR=/tmp/tmp.SUNBhM4nVZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.dropUser("test1user")\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mpTmTroYwa Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("b97e6690-b63c-436a-b645-56706d310129") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.SUNBhM4nVZ + rm /tmp/tmp.mpTmTroYwa /tmp/tmp.SUNBhM4nVZ + return 0 + run_mongo 'use myApp\n db.dropRole("test1role")' userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.dropRole("test1role")' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ddtzIUFoUj +++ mktemp ++ local LAST_ERR=/tmp/tmp.eUOisNMyFc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ddtzIUFoUj ++ cat /tmp/tmp.eUOisNMyFc ++ rm /tmp/tmp.ddtzIUFoUj /tmp/tmp.eUOisNMyFc ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.dropRole("test1role")\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.l6erEtmP96 ++ mktemp + local LAST_ERR=/tmp/tmp.gtjSOxkPdk + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.dropRole("test1role")\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.l6erEtmP96 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("ce0a5759-7e73-4b6a-9909-f8bc7fb196e8") } Percona Server 
for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.gtjSOxkPdk + rm /tmp/tmp.l6erEtmP96 /tmp/tmp.gtjSOxkPdk + return 0 + desc 'selective restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- selective restore -- minio ----------------------------------------------------------------------------------- + run_recovery_check_selective backup-minio some-name-rs0 myApp test2 false test1user test1role + local backup=backup-minio + local cluster=some-name-rs0 + local database=myApp + local collection=test2 + local with_users_and_roles=false + local username=test1user + local role=test1role + restore_name=restore-backup-minio-selective + run_mongo 'use myApp\n db.test2.drop()' myApp:myPass@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.test2.drop()' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.V6a5U6LJXl +++ mktemp ++ local LAST_ERR=/tmp/tmp.6QYjce8AYn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.V6a5U6LJXl ++ cat /tmp/tmp.6QYjce8AYn ++ rm /tmp/tmp.V6a5U6LJXl /tmp/tmp.6QYjce8AYn ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.wRptXNtwmy ++ mktemp + local LAST_ERR=/tmp/tmp.VtsVSTWRj2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wRptXNtwmy Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("3f34223d-be99-4dcc-a751-5852061b0907") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.VtsVSTWRj2 + rm /tmp/tmp.wRptXNtwmy /tmp/tmp.VtsVSTWRj2 + return 0 ++ collection_exists test2 ++ local collection=test2 ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-13973 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-13973 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ jq 'index("test2") != null' ++ grep -v 'switched to' 
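Note: the selective restore exercised just below is assembled by piping e2e-tests/demand-backup/conf/restore.yml through yq, overriding .metadata.name, .spec.backupName, .spec.selective.namespaces and .spec.selective.withUsersAndRoles, and applying the result. A hedged sketch of what that pipeline presumably produces; only the yq-set fields are taken from the trace, while apiVersion and clusterName are assumptions about the base restore.yml:

# Sketch of the selective-restore object applied below; apiVersion and clusterName are assumed,
# the remaining fields mirror the yq overrides visible in the trace.
kubectl apply -f - <<'EOF'
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-minio-selective
spec:
  clusterName: some-name
  backupName: backup-minio
  selective:
    namespaces:
    - myApp.test
    withUsersAndRoles: false   # set to true for the "with users and roles" pass
EOF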
+++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.DanPYQklwU ++++ mktemp +++ local LAST_ERR=/tmp/tmp.hx2d2rLQ5L +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.DanPYQklwU +++ cat /tmp/tmp.hx2d2rLQ5L +++ rm /tmp/tmp.DanPYQklwU /tmp/tmp.hx2d2rLQ5L +++ return 0 ++ local client_container=psmdb-client-66f577db5f-g6m6m ++ local mongo_flag=--quiet ++ [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8SsGSv2ONs +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bc9c5fCI9l ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8SsGSv2ONs ++ cat /tmp/tmp.Bc9c5fCI9l ++ rm /tmp/tmp.8SsGSv2ONs /tmp/tmp.Bc9c5fCI9l ++ return 0 + [[ false == \t\r\u\e ]] + yq '.spec.backupName="backup-minio"' + kubectl_bin apply -f - + yq .spec.selective.withUsersAndRoles=false + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/restore.yml ++ mktemp + local LAST_OUT=/tmp/tmp.abnYAkLLuG + yq '.spec.selective.namespaces[0]="myApp.test"' + yq '.metadata.name="restore-backup-minio-selective"' ++ mktemp + local LAST_ERR=/tmp/tmp.ujIfJEoloi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.abnYAkLLuG perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-selective created + cat /tmp/tmp.ujIfJEoloi + rm /tmp/tmp.abnYAkLLuG /tmp/tmp.ujIfJEoloi + return 0 + wait_restore backup-minio-selective some-name + local backup_name=backup-minio-selective + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-selective object to be createdOK Waiting psmdb-restore/restore-backup-minio-selective to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eBX2R8Jzg1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.EJlLDdYm3B ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eBX2R8Jzg1 ++ cat /tmp/tmp.EJlLDdYm3B ++ rm /tmp/tmp.eBX2R8Jzg1 /tmp/tmp.EJlLDdYm3B ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo ++ collection_exists test2 ++ 
local collection=test2 ++ jq 'index("test2") != null' ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-13973 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-13973 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ grep -v 'switched to' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.xmwffCtJZy ++++ mktemp +++ local LAST_ERR=/tmp/tmp.crt75m8xUC +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.xmwffCtJZy +++ cat /tmp/tmp.crt75m8xUC +++ rm /tmp/tmp.xmwffCtJZy /tmp/tmp.crt75m8xUC +++ return 0 ++ local client_container=psmdb-client-66f577db5f-g6m6m ++ local mongo_flag=--quiet ++ [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oaQHD0XVES +++ mktemp ++ local LAST_ERR=/tmp/tmp.U8sPFo38BV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oaQHD0XVES ++ cat /tmp/tmp.U8sPFo38BV ++ rm /tmp/tmp.oaQHD0XVES /tmp/tmp.U8sPFo38BV ++ return 0 + [[ false == \t\r\u\e ]] + [[ false == \t\r\u\e ]] + [[ false == \f\a\l\s\e ]] ++ user_exists test1user ++ local username=test1user ++ run_mongo 'use myApp\n JSON.stringify(db.getUsers())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getUsers())' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ jq 'any(.[]; ._id==myApp.test1user)' jq: error: myApp/0 is not defined at , line 1: any(.[]; ._id==myApp.test1user) jq: 1 compile error ++ grep -v 'switched to' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.ATK1SXC5F2 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.yamnst1u4m +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.ATK1SXC5F2 +++ cat /tmp/tmp.yamnst1u4m +++ rm /tmp/tmp.ATK1SXC5F2 /tmp/tmp.yamnst1u4m +++ return 0 ++ local client_container=psmdb-client-66f577db5f-g6m6m ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo 
mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v4pq9OKXRJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.rBDDoRQ5nX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.v4pq9OKXRJ ++ cat /tmp/tmp.rBDDoRQ5nX ++ rm /tmp/tmp.v4pq9OKXRJ /tmp/tmp.rBDDoRQ5nX ++ return 0 + [[ '' == \t\r\u\e ]] ++ role_exists test1role ++ local role=test1role ++ run_mongo 'use myApp\n JSON.stringify(db.getRoles())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getRoles())' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ jq 'any(.[]; ._id==myApp.test1role)' ++ grep -v 'switched to' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.yTL0wnx1Ws ++++ mktemp +++ local LAST_ERR=/tmp/tmp.CWXQ566ksi +++ local exit_status=0 +++ local timeout=4 jq: error: myApp/0 is not defined at , line 1: any(.[]; ._id==myApp.test1role) jq: 1 compile error ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.yTL0wnx1Ws +++ cat /tmp/tmp.CWXQ566ksi +++ rm /tmp/tmp.yTL0wnx1Ws /tmp/tmp.CWXQ566ksi +++ return 0 ++ local client_container=psmdb-client-66f577db5f-g6m6m ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UThEZtRqrM +++ mktemp ++ local LAST_ERR=/tmp/tmp.o1EJMyaPeK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UThEZtRqrM ++ cat /tmp/tmp.o1EJMyaPeK ++ rm /tmp/tmp.UThEZtRqrM /tmp/tmp.o1EJMyaPeK ++ return 0 + [[ '' == \t\r\u\e ]] + kubectl_bin delete psmdb-restore restore-backup-minio-selective ++ mktemp + local LAST_OUT=/tmp/tmp.ecxsVXTeee ++ mktemp + local LAST_ERR=/tmp/tmp.9LgWuz1m0I + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb-restore restore-backup-minio-selective + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ecxsVXTeee perconaservermongodbrestore.psmdb.percona.com "restore-backup-minio-selective" deleted + cat /tmp/tmp.9LgWuz1m0I + rm /tmp/tmp.ecxsVXTeee /tmp/tmp.9LgWuz1m0I + return 0 + run_mongo 'use myApp\n 
db.test2.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.test2.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KtUXnNJLd2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ws3va3KZJL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.KtUXnNJLd2 ++ cat /tmp/tmp.Ws3va3KZJL ++ rm /tmp/tmp.KtUXnNJLd2 /tmp/tmp.Ws3va3KZJL ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.6ajMO5awQn ++ mktemp + local LAST_ERR=/tmp/tmp.61RYiDgIa6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test2.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6ajMO5awQn Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("1ba1aac1-df3a-4632-9e0a-d62e73cdea13") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.61RYiDgIa6 + rm /tmp/tmp.6ajMO5awQn /tmp/tmp.61RYiDgIa6 + return 0 + desc 'selective restore with users and roles -- minio' + set +o xtrace ----------------------------------------------------------------------------------- selective restore with users and roles -- minio ----------------------------------------------------------------------------------- + run_recovery_check_selective backup-minio some-name-rs0 myApp test2 true test1user test1role + local backup=backup-minio + local cluster=some-name-rs0 + local database=myApp + local collection=test2 + local with_users_and_roles=true + local username=test1user + local role=test1role + restore_name=restore-backup-minio-selective + run_mongo 'use myApp\n db.test2.drop()' myApp:myPass@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.test2.drop()' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kdLnhgBS2u +++ mktemp ++ local LAST_ERR=/tmp/tmp.nanxaRUItN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ 
set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kdLnhgBS2u ++ cat /tmp/tmp.nanxaRUItN ++ rm /tmp/tmp.kdLnhgBS2u /tmp/tmp.nanxaRUItN ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.gh4js0hr9p ++ mktemp + local LAST_ERR=/tmp/tmp.6xK3a9Prt6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test2.drop()\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gh4js0hr9p Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("557ecbe2-6d28-4d2b-aae0-9afc7da48d3b") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp true bye + cat /tmp/tmp.6xK3a9Prt6 + rm /tmp/tmp.gh4js0hr9p /tmp/tmp.6xK3a9Prt6 + return 0 ++ collection_exists test2 ++ local collection=test2 ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-13973 '' '' --quiet ++ grep -v 'switched to' ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-13973 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp ++ jq 'index("test2") != null' +++ local LAST_OUT=/tmp/tmp.VaWx0l3dfW ++++ mktemp +++ local LAST_ERR=/tmp/tmp.JxBqh4L3yc +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.VaWx0l3dfW +++ cat /tmp/tmp.JxBqh4L3yc +++ rm /tmp/tmp.VaWx0l3dfW /tmp/tmp.JxBqh4L3yc +++ return 0 ++ local client_container=psmdb-client-66f577db5f-g6m6m ++ local mongo_flag=--quiet ++ [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I29fSyy6p8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3nDU2Z2xzh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | 
mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.I29fSyy6p8 ++ cat /tmp/tmp.3nDU2Z2xzh ++ rm /tmp/tmp.I29fSyy6p8 /tmp/tmp.3nDU2Z2xzh ++ return 0 + [[ false == \t\r\u\e ]] + yq .spec.selective.withUsersAndRoles=true + kubectl_bin apply -f - + yq '.spec.backupName="backup-minio"' + yq '.spec.selective.namespaces[0]="myApp.test"' + yq /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/restore.yml + yq '.metadata.name="restore-backup-minio-selective"' ++ mktemp + local LAST_OUT=/tmp/tmp.qfbQZA4PQF ++ mktemp + local LAST_ERR=/tmp/tmp.56zUYN5UEF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qfbQZA4PQF perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-selective created + cat /tmp/tmp.56zUYN5UEF + rm /tmp/tmp.qfbQZA4PQF /tmp/tmp.56zUYN5UEF + return 0 + wait_restore backup-minio-selective some-name + local backup_name=backup-minio-selective + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-selective object to be createdOK Waiting psmdb-restore/restore-backup-minio-selective to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gU4bIVGV5l +++ mktemp ++ local LAST_ERR=/tmp/tmp.X2FWYjSgmE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gU4bIVGV5l ++ cat /tmp/tmp.X2FWYjSgmE ++ rm /tmp/tmp.gU4bIVGV5l /tmp/tmp.X2FWYjSgmE ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo ++ collection_exists test2 ++ local collection=test2 ++ run_mongo 'use myApp\n JSON.stringify(db.getCollectionNames())' myApp:myPass@some-name-rs0.demand-backup-13973 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getCollectionNames())' ++ grep -v 'switched to' ++ local uri=myApp:myPass@some-name-rs0.demand-backup-13973 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ jq 'index("test2") != null' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.SQwjdDdCGv ++++ mktemp +++ local LAST_ERR=/tmp/tmp.xcUXyDQ4Rf +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.SQwjdDdCGv +++ cat /tmp/tmp.xcUXyDQ4Rf +++ rm /tmp/tmp.SQwjdDdCGv /tmp/tmp.xcUXyDQ4Rf +++ return 0 ++ local client_container=psmdb-client-66f577db5f-g6m6m ++ local mongo_flag=--quiet ++ [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo 
mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NHlibn35i9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AEmlrqYBpz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getCollectionNames())\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NHlibn35i9 ++ cat /tmp/tmp.AEmlrqYBpz ++ rm /tmp/tmp.NHlibn35i9 /tmp/tmp.AEmlrqYBpz ++ return 0 + [[ false == \t\r\u\e ]] + [[ true == \t\r\u\e ]] ++ user_exists test1user ++ local username=test1user ++ run_mongo 'use myApp\n JSON.stringify(db.getUsers())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getUsers())' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ grep -v 'switched to' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.aFm5wXKFPg ++++ mktemp +++ local LAST_ERR=/tmp/tmp.EwJGPLtJyj +++ local exit_status=0 +++ local timeout=4 ++ jq 'any(.[]; ._id==myApp.test1user)' ++++ seq 0 2 jq: error: myApp/0 is not defined at , line 1: any(.[]; ._id==myApp.test1user) jq: 1 compile error +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.aFm5wXKFPg +++ cat /tmp/tmp.EwJGPLtJyj +++ rm /tmp/tmp.aFm5wXKFPg /tmp/tmp.EwJGPLtJyj +++ return 0 ++ local client_container=psmdb-client-66f577db5f-g6m6m ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jvx7XETa9X +++ mktemp ++ local LAST_ERR=/tmp/tmp.974o0PdvvY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getUsers())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.jvx7XETa9X ++ cat /tmp/tmp.974o0PdvvY ++ rm /tmp/tmp.jvx7XETa9X /tmp/tmp.974o0PdvvY ++ return 0 + [[ '' == \f\a\l\s\e ]] ++ role_exists test1role ++ local role=test1role ++ run_mongo 'use myApp\n JSON.stringify(db.getRoles())' userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 '' '' --quiet ++ local 'command=use myApp\n JSON.stringify(db.getRoles())' ++ local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 ++ local driver=mongodb+srv ++ local suffix=.svc.cluster.local ++ grep -v 'switched to' ++ jq 'any(.[]; ._id==myApp.test1role)' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local 
LAST_OUT=/tmp/tmp.bQaDOkVecU ++++ mktemp jq: error: myApp/0 is not defined at , line 1: any(.[]; ._id==myApp.test1role) jq: 1 compile error +++ local LAST_ERR=/tmp/tmp.ba8mncvBZs +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.bQaDOkVecU +++ cat /tmp/tmp.ba8mncvBZs +++ rm /tmp/tmp.bQaDOkVecU /tmp/tmp.ba8mncvBZs +++ return 0 ++ local client_container=psmdb-client-66f577db5f-g6m6m ++ local mongo_flag=--quiet ++ [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973 == *cfg* ]] ++ replica_set=rs0 ++ kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0uudnwqSj3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Qbs18f88NA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n JSON.stringify(db.getRoles())\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 --quiet' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0uudnwqSj3 ++ cat /tmp/tmp.Qbs18f88NA ++ rm /tmp/tmp.0uudnwqSj3 /tmp/tmp.Qbs18f88NA ++ return 0 + [[ '' == \f\a\l\s\e ]] + desc 'restore from backup source, with storageName -- minio' + set +o xtrace ----------------------------------------------------------------------------------- restore from backup source, with storageName -- minio ----------------------------------------------------------------------------------- ++ get_backup_dest backup-minio ++ local backup_name=backup-minio ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ sed 's|s3://||' ++ sed 's|azure://||' +++ mktemp ++ sed -e 's/.json$//' ++ local LAST_OUT=/tmp/tmp.mBtmh6sKyl +++ mktemp ++ local LAST_ERR=/tmp/tmp.ApgrAPgEb0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mBtmh6sKyl ++ cat /tmp/tmp.ApgrAPgEb0 ++ rm /tmp/tmp.mBtmh6sKyl /tmp/tmp.ApgrAPgEb0 ++ return 0 + backup_dest_minio=operator-testing/2025-05-21T11:39:28Z + run_recovery_check_bkp_source backup-minio operator-testing/2025-05-21T11:39:28Z some-name-rs0 backup-minio-source-0 + local backup=backup-minio + local backup_dest=operator-testing/2025-05-21T11:39:28Z + local cluster=some-name-rs0 + local source=backup-minio-source-0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XXAhoS1vRv +++ mktemp ++ local LAST_ERR=/tmp/tmp.K8dmFw8Niu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.XXAhoS1vRv ++ cat /tmp/tmp.K8dmFw8Niu ++ rm /tmp/tmp.XXAhoS1vRv /tmp/tmp.K8dmFw8Niu ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Igqm5JFMlB ++ mktemp + local LAST_ERR=/tmp/tmp.TgJjdqKgZN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Igqm5JFMlB Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("7be2a9ff-8525-486a-bc33-bdb028d0c657") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.TgJjdqKgZN + rm /tmp/tmp.Igqm5JFMlB /tmp/tmp.TgJjdqKgZN + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-13973 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:43:53+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-13973 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j8V4Dn33BP +++ mktemp ++ local LAST_ERR=/tmp/tmp.5yOC5M15Lg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.j8V4Dn33BP ++ cat /tmp/tmp.5yOC5M15Lg ++ rm /tmp/tmp.j8V4Dn33BP /tmp/tmp.5yOC5M15Lg ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.enfMVxN23F ++ mktemp + local LAST_ERR=/tmp/tmp.2DCii4V2E2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.enfMVxN23F + cat /tmp/tmp.2DCii4V2E2 + rm /tmp/tmp.enfMVxN23F /tmp/tmp.2DCii4V2E2 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.O5rEQupLX6/find-2nd + run_restore_backupsource backup-minio-source-0 operator-testing/2025-05-21T11:39:28Z + local backupName=backup-minio-source-0 + local backupDest=operator-testing/2025-05-21T11:39:28Z + local storageName= + desc 'run restore restore-backup-minio-source-0 from backup backup-minio-source-0 destination is operator-testing/2025-05-21T11:39:28Z' + set +o xtrace ----------------------------------------------------------------------------------- run restore restore-backup-minio-source-0 from backup backup-minio-source-0 destination is operator-testing/2025-05-21T11:39:28Z ----------------------------------------------------------------------------------- + '[' -z '' ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/restore-backupsource.yml + /usr/bin/sed -e 's/name:/name: restore-backup-minio-source-0/' + /usr/bin/sed -e 's|BACKUP-NAME|operator-testing/2025-05-21T11:39:28Z|' + /usr/bin/sed -e /storageName/d + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.ILUy8SpQgX ++ mktemp + local LAST_ERR=/tmp/tmp.RjwZMpfsOY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' 
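# --- annotation, not part of the captured trace -----------------------------------
# The run_restore_backupsource step above pipes restore-backupsource.yml through sed
# (set the object name, substitute BACKUP-NAME with the backup destination, drop the
# storageName line) and applies the result. A minimal sketch of roughly what kubectl
# apply receives, assuming the template uses the usual backupSource layout of the
# psmdb.percona.com/v1 restore CR; credentialsSecret and the s3:// prefix handling
# are assumptions, not values taken from this log:
cat <<'EOF' | kubectl apply -f -
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-minio-source-0
spec:
  clusterName: some-name
  backupSource:
    destination: s3://operator-testing/2025-05-21T11:39:28Z   # backup_dest_minio from this run
    s3:
      credentialsSecret: minio-secret        # illustrative secret name
      bucket: operator-testing
      region: us-east-1
      endpointUrl: http://minio-service:9000/
EOF
# -----------------------------------------------------------------------------------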
+ break + cat /tmp/tmp.ILUy8SpQgX perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-source-0 created + cat /tmp/tmp.RjwZMpfsOY + rm /tmp/tmp.ILUy8SpQgX /tmp/tmp.RjwZMpfsOY + return 0 + return + wait_restore backup-minio-source-0 some-name + local backup_name=backup-minio-source-0 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-source-0 object to be createdOK Waiting psmdb-restore/restore-backup-minio-source-0 to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CdhwxYmiRk +++ mktemp ++ local LAST_ERR=/tmp/tmp.ayCuBQsGp8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CdhwxYmiRk ++ cat /tmp/tmp.ayCuBQsGp8 ++ rm /tmp/tmp.CdhwxYmiRk /tmp/tmp.ayCuBQsGp8 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:44:15+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PBP8o2ETbs +++ mktemp ++ local LAST_ERR=/tmp/tmp.SHIozGtpFz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PBP8o2ETbs ++ cat /tmp/tmp.SHIozGtpFz ++ rm /tmp/tmp.PBP8o2ETbs /tmp/tmp.SHIozGtpFz ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.ENedQ52EnR ++ mktemp + local LAST_ERR=/tmp/tmp.KQ4qMUd0Lt + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ENedQ52EnR + cat /tmp/tmp.KQ4qMUd0Lt + rm /tmp/tmp.ENedQ52EnR /tmp/tmp.KQ4qMUd0Lt + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:44:17+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nLU8Guu0R6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.FuUIqQPm9n ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nLU8Guu0R6 ++ cat /tmp/tmp.FuUIqQPm9n ++ rm /tmp/tmp.nLU8Guu0R6 /tmp/tmp.FuUIqQPm9n ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.YNCPqYc3mH ++ mktemp + local LAST_ERR=/tmp/tmp.5mGRrFEtlc + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YNCPqYc3mH + cat /tmp/tmp.5mGRrFEtlc + rm /tmp/tmp.YNCPqYc3mH /tmp/tmp.5mGRrFEtlc + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:44:20+0000] running db.test.find() in myApp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AIYKxrECep +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_ERR=/tmp/tmp.x6ch6FOueg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AIYKxrECep ++ cat /tmp/tmp.x6ch6FOueg ++ rm /tmp/tmp.AIYKxrECep /tmp/tmp.x6ch6FOueg ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.XL5RfurqYd ++ mktemp + local LAST_ERR=/tmp/tmp.5rUVEJcGdf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XL5RfurqYd + cat /tmp/tmp.5rUVEJcGdf + rm /tmp/tmp.XL5RfurqYd /tmp/tmp.5rUVEJcGdf + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + desc 'restore from backup source, no storageName -- minio' + set +o xtrace ----------------------------------------------------------------------------------- restore from backup source, no storageName -- minio ----------------------------------------------------------------------------------- ++ get_backup_dest backup-minio ++ local backup_name=backup-minio ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' +++ mktemp ++ sed 's|azure://||' ++ sed 's|s3://||' ++ sed -e 's/.json$//' ++ local LAST_OUT=/tmp/tmp.joB14wT2Nm +++ mktemp ++ local LAST_ERR=/tmp/tmp.Z3bJ4CJjV4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.joB14wT2Nm ++ cat /tmp/tmp.Z3bJ4CJjV4 ++ rm /tmp/tmp.joB14wT2Nm /tmp/tmp.Z3bJ4CJjV4 ++ return 0 + backup_dest_minio=operator-testing/2025-05-21T11:39:28Z + run_recovery_check_bkp_source backup-minio operator-testing/2025-05-21T11:39:28Z some-name-rs0 backup-minio-source-1 + local backup=backup-minio + local 
backup_dest=operator-testing/2025-05-21T11:39:28Z + local cluster=some-name-rs0 + local source=backup-minio-source-1 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-13973 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FD4Sc4CxY9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.X6VkP7GZPb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FD4Sc4CxY9 ++ cat /tmp/tmp.X6VkP7GZPb ++ rm /tmp/tmp.FD4Sc4CxY9 /tmp/tmp.X6VkP7GZPb ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.HDs1jTauf9 ++ mktemp + local LAST_ERR=/tmp/tmp.JKEwMFyIsD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.HDs1jTauf9 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("009e0c09-fb29-48fd-bd55-4abf8115aa61") } Percona Server for MongoDB server version: v7.0.18-11 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.JKEwMFyIsD + rm /tmp/tmp.HDs1jTauf9 /tmp/tmp.JKEwMFyIsD + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-13973 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:44:28+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-13973 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-13973 + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.32bqqbioEt +++ mktemp ++ local LAST_ERR=/tmp/tmp.fN0I5a1Bo9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.32bqqbioEt ++ cat /tmp/tmp.fN0I5a1Bo9 ++ rm /tmp/tmp.32bqqbioEt /tmp/tmp.fN0I5a1Bo9 ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.WBid68vHye ++ mktemp + local LAST_ERR=/tmp/tmp.bWLn6wTcQd + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.WBid68vHye + cat /tmp/tmp.bWLn6wTcQd + rm /tmp/tmp.WBid68vHye /tmp/tmp.bWLn6wTcQd + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.O5rEQupLX6/find-2nd + run_restore_backupsource backup-minio-source-1 operator-testing/2025-05-21T11:39:28Z + local backupName=backup-minio-source-1 + local backupDest=operator-testing/2025-05-21T11:39:28Z + local storageName= + desc 'run restore restore-backup-minio-source-1 from backup backup-minio-source-1 destination is operator-testing/2025-05-21T11:39:28Z' + set +o xtrace ----------------------------------------------------------------------------------- run restore restore-backup-minio-source-1 from backup backup-minio-source-1 destination is operator-testing/2025-05-21T11:39:28Z ----------------------------------------------------------------------------------- + '[' -z '' ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/restore-backupsource.yml + kubectl_bin apply -f - + /usr/bin/sed -e 's/name:/name: restore-backup-minio-source-1/' + /usr/bin/sed -e 's|BACKUP-NAME|operator-testing/2025-05-21T11:39:28Z|' + /usr/bin/sed -e /storageName/d ++ mktemp + local LAST_OUT=/tmp/tmp.lCKrBQ5swI ++ mktemp + local LAST_ERR=/tmp/tmp.jTc96R1Vpu + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' 
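# --- annotation, not part of the captured trace -----------------------------------
# wait_restore and wait_cluster_consistency below run with xtrace disabled, so only
# their progress messages appear in the log. A minimal sketch of equivalent waits,
# assuming a kubectl recent enough for jsonpath conditions (the client here is 1.33);
# the 1780s value mirrors the wait_time printed below, the cluster timeout is
# illustrative:
kubectl wait perconaservermongodbrestore/restore-backup-minio-source-1 \
    --for=jsonpath='{.status.state}'=ready --timeout=1780s
kubectl wait psmdb/some-name \
    --for=jsonpath='{.status.state}'=ready --timeout=320s
# -----------------------------------------------------------------------------------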
+ break + cat /tmp/tmp.lCKrBQ5swI perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-source-1 created + cat /tmp/tmp.jTc96R1Vpu + rm /tmp/tmp.lCKrBQ5swI /tmp/tmp.jTc96R1Vpu + return 0 + return + wait_restore backup-minio-source-1 some-name + local backup_name=backup-minio-source-1 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=1780 + local ok_if_ready=0 + set +o xtrace Waiting for the psmdb-restore/restore-backup-minio-source-1 object to be createdOK Waiting psmdb-restore/restore-backup-minio-source-1 to reach state "ready" OK + [[ 1 -eq 1 ]] + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BE7MWjaYMI +++ mktemp ++ local LAST_ERR=/tmp/tmp.ttYl7sDORi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BE7MWjaYMI ++ cat /tmp/tmp.ttYl7sDORi ++ rm /tmp/tmp.BE7MWjaYMI /tmp/tmp.ttYl7sDORi ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:44:53+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5Ogx7EOi2w +++ mktemp ++ local LAST_ERR=/tmp/tmp.zVhBFK6c7b ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5Ogx7EOi2w ++ cat /tmp/tmp.zVhBFK6c7b ++ rm /tmp/tmp.5Ogx7EOi2w /tmp/tmp.zVhBFK6c7b ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.MxNQaefTxI ++ mktemp + local LAST_ERR=/tmp/tmp.WhiHw2AvAQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MxNQaefTxI + cat /tmp/tmp.WhiHw2AvAQ + rm /tmp/tmp.MxNQaefTxI /tmp/tmp.WhiHw2AvAQ + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:44:56+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HBDbNd0Alw +++ mktemp ++ local LAST_ERR=/tmp/tmp.tSBvgqcr4b ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HBDbNd0Alw ++ cat /tmp/tmp.tSBvgqcr4b ++ rm /tmp/tmp.HBDbNd0Alw /tmp/tmp.tSBvgqcr4b ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.KDCz2VeavO ++ mktemp + local LAST_ERR=/tmp/tmp.IbwoipOyDs + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KDCz2VeavO + cat /tmp/tmp.IbwoipOyDs + rm /tmp/tmp.KDCz2VeavO /tmp/tmp.IbwoipOyDs + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local postfix= + local suffix= + local database=myApp + local collection=test + local sort= + local 'full_command=db.test.find()' + [[ ! 
-z '' ]] + log 'running db.test.find() in myApp' + set +o xtrace [2025-05-21T11:44:58+0000] running db.test.find() in myApp + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 + local driver=mongodb + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.o4JzfY3waD +++ mktemp ++ local LAST_ERR=/tmp/tmp.yQojw8lqCL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.o4JzfY3waD ++ cat /tmp/tmp.yQojw8lqCL ++ rm /tmp/tmp.o4JzfY3waD /tmp/tmp.yQojw8lqCL ++ return 0 + local client_container=psmdb-client-66f577db5f-g6m6m + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.UgRPsLcnIW ++ mktemp + local LAST_ERR=/tmp/tmp.tWedsani0T + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-66f577db5f-g6m6m -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-13973.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UgRPsLcnIW + cat /tmp/tmp.tWedsani0T + rm /tmp/tmp.UgRPsLcnIW /tmp/tmp.tWedsani0T + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/compare/find.json /tmp/tmp.O5rEQupLX6/find + desc 'delete backup and check if it is removed from bucket -- minio' + set +o xtrace ----------------------------------------------------------------------------------- delete backup and check if it is removed from bucket -- minio ----------------------------------------------------------------------------------- + kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.Uol1C2zrnb ++ mktemp + local LAST_ERR=/tmp/tmp.EfEb5ZQbG4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Uol1C2zrnb perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs" deleted perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted + cat /tmp/tmp.EfEb5ZQbG4 + rm /tmp/tmp.Uol1C2zrnb /tmp/tmp.EfEb5ZQbG4 + return 0 ++ kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key 
AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ grep -c operator-testing/2025-05-21T11:39:28Z ++ cat +++ mktemp ++ local LAST_OUT=/tmp/tmp.5vuq9OOGaE +++ mktemp ++ local LAST_ERR=/tmp/tmp.9VYrJIWWfP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5vuq9OOGaE ++ cat /tmp/tmp.9VYrJIWWfP If you don't see a command prompt, try pressing enter. warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-13973 ++ rm /tmp/tmp.5vuq9OOGaE /tmp/tmp.9VYrJIWWfP ++ return 0 + backup_exists=0 + [[ 0 -eq 1 ]] + '[' -z '' ']' + check_backup_deletion https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-21T11:38:53Z aws-s3 + path=https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-21T11:38:53Z + storage_name=aws-s3 + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-21T11:38:53Z + [[ 403 -eq 403 ]] + check_backup_deletion https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-21T11:39:16Z gcp-cs + path=https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-21T11:39:16Z + storage_name=gcp-cs + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-21T11:39:16Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-21T11:39:16Z + [[ 404 -eq 404 ]] + check_backup_deletion https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-21T11:39:04Z azure-blob + path=https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-21T11:39:04Z + storage_name=azure-blob + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-21T11:39:04Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-21T11:39:04Z + [[ 404 -eq 404 ]] + desc 'checking backup deletion without cr' + set +o xtrace ----------------------------------------------------------------------------------- checking backup deletion without cr ----------------------------------------------------------------------------------- + run_backup minio + local storage=minio + local backup_name=backup-minio + local type=logical + desc 'run backup backup-minio' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-minio" | .spec.storageName = "minio" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/backup-minio.yml ++ mktemp + local LAST_OUT=/tmp/tmp.CsTtZmQchY ++ 
mktemp + local LAST_ERR=/tmp/tmp.PrRo5IRJ8x + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CsTtZmQchY perconaservermongodbbackup.psmdb.percona.com/backup-minio created + cat /tmp/tmp.PrRo5IRJ8x + rm /tmp/tmp.CsTtZmQchY /tmp/tmp.PrRo5IRJ8x + return 0 + '[' -z '' ']' + run_backup aws-s3 + local storage=aws-s3 + local backup_name=backup-aws-s3 + local type=logical + desc 'run backup backup-aws-s3' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-aws-s3 ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-aws-s3" | .spec.storageName = "aws-s3" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/backup-aws-s3.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.yAzjPL6n1c ++ mktemp + local LAST_ERR=/tmp/tmp.lR45ZWO8vX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yAzjPL6n1c perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created + cat /tmp/tmp.lR45ZWO8vX + rm /tmp/tmp.yAzjPL6n1c /tmp/tmp.lR45ZWO8vX + return 0 + run_backup gcp-cs + local storage=gcp-cs + local backup_name=backup-gcp-cs + local type=logical + desc 'run backup backup-gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-gcp-cs ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-gcp-cs" | .spec.storageName = "gcp-cs" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/backup-gcp-cs.yml ++ mktemp + local LAST_OUT=/tmp/tmp.vAGJuNAW4q ++ mktemp + local LAST_ERR=/tmp/tmp.uRiLL2nFLS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vAGJuNAW4q perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs created + cat /tmp/tmp.uRiLL2nFLS + rm /tmp/tmp.vAGJuNAW4q /tmp/tmp.uRiLL2nFLS + return 0 + run_backup azure-blob + local storage=azure-blob + local backup_name=backup-azure-blob + local type=logical + desc 'run backup backup-azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-azure-blob ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-azure-blob" | .spec.storageName = "azure-blob" | .spec.type = "logical"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/e2e-tests/demand-backup/conf/backup-azure-blob.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.pNzqnFSv64 ++ mktemp + local LAST_ERR=/tmp/tmp.Dcag43DADC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.pNzqnFSv64 perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created + cat /tmp/tmp.Dcag43DADC + rm /tmp/tmp.pNzqnFSv64 /tmp/tmp.Dcag43DADC + return 0 + wait_backup backup-minio + local backup_name=backup-minio + local target_state=ready + set +o xtrace waiting for 
backup-minio to reach ready state.. + '[' -z '' ']' + wait_backup backup-aws-s3 + local backup_name=backup-aws-s3 + local target_state=ready + set +o xtrace waiting for backup-aws-s3 to reach ready state................. + wait_backup backup-gcp-cs + local backup_name=backup-gcp-cs + local target_state=ready + set +o xtrace waiting for backup-gcp-cs to reach ready state. + wait_backup backup-azure-blob + local backup_name=backup-azure-blob + local target_state=ready + set +o xtrace waiting for backup-azure-blob to reach ready state. + kubectl_bin delete psmdb --all ++ mktemp + local LAST_OUT=/tmp/tmp.Z3VzHKzQWe ++ mktemp + local LAST_ERR=/tmp/tmp.HhPtI6x5zX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Z3VzHKzQWe perconaservermongodb.psmdb.percona.com "some-name" deleted + cat /tmp/tmp.HhPtI6x5zX + rm /tmp/tmp.Z3VzHKzQWe /tmp/tmp.HhPtI6x5zX + return 0 + sleep 60 + desc 'delete backup and check if it is removed from bucket -- minio' + set +o xtrace ----------------------------------------------------------------------------------- delete backup and check if it is removed from bucket -- minio ----------------------------------------------------------------------------------- + kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.3zOb7OESHC ++ mktemp + local LAST_ERR=/tmp/tmp.SEbXGbbbUw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3zOb7OESHC perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs" deleted perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted + cat /tmp/tmp.SEbXGbbbUw + rm /tmp/tmp.3zOb7OESHC /tmp/tmp.SEbXGbbbUw + return 0 ++ grep -c operator-testing/2025-05-21T11:39:28Z ++ kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ cat +++ mktemp ++ local LAST_OUT=/tmp/tmp.p1inbY5NCB +++ mktemp ++ local LAST_ERR=/tmp/tmp.SbOTa49y0M ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.p1inbY5NCB ++ cat /tmp/tmp.SbOTa49y0M ++ rm /tmp/tmp.p1inbY5NCB /tmp/tmp.SbOTa49y0M ++ return 0 + backup_exists=0 + [[ 0 -eq 1 ]] + '[' -z '' ']' + check_backup_deletion https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-21T11:38:53Z aws-s3 + path=https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-21T11:38:53Z + storage_name=aws-s3 + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2025-05-21T11:38:53Z + [[ 403 -eq 403 ]] + check_backup_deletion https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-21T11:39:16Z gcp-cs + 
path=https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-21T11:39:16Z + storage_name=gcp-cs + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-21T11:39:16Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2025-05-21T11:39:16Z + [[ 404 -eq 404 ]] + check_backup_deletion https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-21T11:39:04Z azure-blob + path=https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-21T11:39:04Z + storage_name=azure-blob + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-21T11:39:04Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2025-05-21T11:39:04Z + [[ 404 -eq 404 ]] + desc 'check for passwords leak' + set +o xtrace ----------------------------------------------------------------------------------- check for passwords leak ----------------------------------------------------------------------------------- + check_passwords_leak + local secrets + local passwords + local pods ++ kubectl_bin get secrets -o json ++ jq -r '.items[].data | to_entries | .[] | select(.key | (contains("_PASSWORD"))) | .value' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8td6VwZE7X +++ mktemp ++ local LAST_ERR=/tmp/tmp.HKPFnAQBaF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8td6VwZE7X ++ cat /tmp/tmp.HKPFnAQBaF ++ rm /tmp/tmp.8td6VwZE7X /tmp/tmp.HKPFnAQBaF ++ return 0 + secrets='YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2' + echo secrets=YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 secrets=YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d 
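# --- annotation, not part of the captured trace -----------------------------------
# check_passwords_leak, traced around this point, decodes every *_PASSWORD value from
# the cluster secrets and later greps each saved pod log for both the plaintext and
# the base64 form. A compact standalone sketch of the same idea; the function name is
# illustrative, while the jq filter and fixed-string grep match what this run uses:
check_leak_in() {
    local logfile=$1
    kubectl get secrets -o json \
        | jq -r '.items[].data | to_entries[] | select(.key | contains("_PASSWORD")) | .value' \
        | while read -r encoded; do
            plain=$(echo "$encoded" | base64 -d)
            for needle in "$plain" "$encoded"; do
                # -F: fixed strings, so passwords such as backup123456# are not treated as patterns
                grep -qF -- "$needle" "$logfile" && echo "possible leak of '$needle' in $logfile"
            done
        done
}
# e.g.: check_leak_in /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt
# -----------------------------------------------------------------------------------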
++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo + passwords='backup123456# backup123456%23 clusterAdmin123456 clusterAdmin123456 clusterMonitor123456 clusterMonitor123456 databaseAdmin123456 databaseAdmin123456 userAdmin123456 userAdmin123456 backup123456# clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2' + echo passwords=backup123456# backup123456%23 clusterAdmin123456 clusterAdmin123456 clusterMonitor123456 clusterMonitor123456 databaseAdmin123456 databaseAdmin123456 userAdmin123456 userAdmin123456 backup123456# clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 passwords=backup123456# backup123456%23 clusterAdmin123456 clusterAdmin123456 clusterMonitor123456 clusterMonitor123456 databaseAdmin123456 databaseAdmin123456 userAdmin123456 userAdmin123456 backup123456# clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2Iw== YmFja3VwMTIzNDU2JTIz Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2Iw== Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 ++ kubectl_bin get pods -o name ++ awk -F / '{print $2}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C6xEXyVGEN +++ mktemp ++ local LAST_ERR=/tmp/tmp.pybgH5rlec ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods -o name ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.C6xEXyVGEN ++ cat /tmp/tmp.pybgH5rlec ++ rm /tmp/tmp.C6xEXyVGEN /tmp/tmp.pybgH5rlec ++ return 0 + pods='minio-service-86dfccd949-hc9qv psmdb-client-66f577db5f-g6m6m' + echo pods=minio-service-86dfccd949-hc9qv psmdb-client-66f577db5f-g6m6m pods=minio-service-86dfccd949-hc9qv psmdb-client-66f577db5f-g6m6m + collect_logs demand-backup-13973 + local containers + local count + NS=demand-backup-13973 + for p in '$pods' ++ kubectl_bin -n demand-backup-13973 get pod minio-service-86dfccd949-hc9qv -o 'jsonpath={.spec.containers[*].name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AbUPdERZWV +++ mktemp ++ local LAST_ERR=/tmp/tmp.1Sf5ElnldH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl -n demand-backup-13973 get pod minio-service-86dfccd949-hc9qv -o 'jsonpath={.spec.containers[*].name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AbUPdERZWV ++ cat /tmp/tmp.1Sf5ElnldH ++ rm /tmp/tmp.AbUPdERZWV /tmp/tmp.1Sf5ElnldH ++ return 0 + containers=minio + for c in '$containers' + [[ minio =~ pmm ]] + 
kubectl_bin -n demand-backup-13973 logs minio-service-86dfccd949-hc9qv -c minio ++ mktemp + local LAST_OUT=/tmp/tmp.9ckwlsMXUe ++ mktemp + local LAST_ERR=/tmp/tmp.oOsDure1xU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl -n demand-backup-13973 logs minio-service-86dfccd949-hc9qv -c minio + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9ckwlsMXUe + cat /tmp/tmp.oOsDure1xU + rm /tmp/tmp.9ckwlsMXUe /tmp/tmp.oOsDure1xU + return 0 + echo logs saved in: /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt logs saved in: /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456%23 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in 
'$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2JTIz /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-minio-service-86dfccd949-hc9qv-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + echo + for p in '$pods' ++ kubectl_bin -n demand-backup-13973 get pod psmdb-client-66f577db5f-g6m6m -o 'jsonpath={.spec.containers[*].name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ayeoqbQLNN +++ mktemp ++ local LAST_ERR=/tmp/tmp.lREFVkxbKi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl -n demand-backup-13973 get pod psmdb-client-66f577db5f-g6m6m -o 'jsonpath={.spec.containers[*].name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ayeoqbQLNN ++ cat /tmp/tmp.lREFVkxbKi ++ rm /tmp/tmp.ayeoqbQLNN /tmp/tmp.lREFVkxbKi ++ return 0 + containers=psmdb-client + for c in '$containers' + [[ psmdb-client =~ pmm ]] + kubectl_bin -n demand-backup-13973 logs psmdb-client-66f577db5f-g6m6m -c psmdb-client ++ mktemp + local LAST_OUT=/tmp/tmp.eBPunvAY1l ++ mktemp + local LAST_ERR=/tmp/tmp.O5Ta8Xp0p4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl 
-n demand-backup-13973 logs psmdb-client-66f577db5f-g6m6m -c psmdb-client + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eBPunvAY1l + cat /tmp/tmp.O5Ta8Xp0p4 + rm /tmp/tmp.eBPunvAY1l /tmp/tmp.O5Ta8Xp0p4 + return 0 + echo logs saved in: /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt logs saved in: /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456%23 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2JTIz /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + 
count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-psmdb-client-66f577db5f-g6m6m-psmdb-client.txt ++ : + count=0 + [[ 0 != 0 ]] + echo + '[' -n psmdb-operator ']' ++ kubectl_bin -n psmdb-operator get pods -o name ++ awk -F / '{print $2}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4DO38EoD8h +++ mktemp ++ local LAST_ERR=/tmp/tmp.aHbTektaYK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl -n psmdb-operator get pods -o name ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4DO38EoD8h ++ cat /tmp/tmp.aHbTektaYK ++ rm /tmp/tmp.4DO38EoD8h /tmp/tmp.aHbTektaYK ++ return 0 + pods=percona-server-mongodb-operator-5b4f87c7bd-2qvfk + collect_logs psmdb-operator + local containers + local count + NS=psmdb-operator + for p in '$pods' ++ kubectl_bin -n psmdb-operator get pod percona-server-mongodb-operator-5b4f87c7bd-2qvfk -o 'jsonpath={.spec.containers[*].name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aqrpgIdafs +++ mktemp ++ local LAST_ERR=/tmp/tmp.GNKPRQ0zv1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl -n psmdb-operator get pod 
percona-server-mongodb-operator-5b4f87c7bd-2qvfk -o 'jsonpath={.spec.containers[*].name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.aqrpgIdafs ++ cat /tmp/tmp.GNKPRQ0zv1 ++ rm /tmp/tmp.aqrpgIdafs /tmp/tmp.GNKPRQ0zv1 ++ return 0 + containers=percona-server-mongodb-operator + for c in '$containers' + [[ percona-server-mongodb-operator =~ pmm ]] + kubectl_bin -n psmdb-operator logs percona-server-mongodb-operator-5b4f87c7bd-2qvfk -c percona-server-mongodb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.IIMMI0SCs1 ++ mktemp + local LAST_ERR=/tmp/tmp.DymSXrsIXj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl -n psmdb-operator logs percona-server-mongodb-operator-5b4f87c7bd-2qvfk -c percona-server-mongodb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.IIMMI0SCs1 + cat /tmp/tmp.DymSXrsIXj + rm /tmp/tmp.IIMMI0SCs1 /tmp/tmp.DymSXrsIXj + return 0 + echo logs saved in: /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt logs saved in: /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456%23 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456# 
/tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2JTIz /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2Iw== /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] 
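Every kubectl invocation in this trace goes through the suite's kubectl_bin wrapper, which captures stdout/stderr to mktemp files and retries up to three times with a growing sleep. Judging from the trace (local LAST_OUT/LAST_ERR, seq 0 2, sleep 0/4/8 on failure), it behaves roughly like the sketch below; this is a reconstruction for readability, not the suite's exact helper:

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 timeout=4
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ "$exit_status" != 0 ] || break        # success: stop retrying
            sleep $((timeout * i))                  # 0s, then 4s, then 8s between attempts
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm -f "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

Callers that tolerate failure (e.g. the cert-manager deletion later in this log) append || true, which is why a non-zero return here does not abort the test.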
+ for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.O5rEQupLX6/logs_output-percona-server-mongodb-operator-5b4f87c7bd-2qvfk-percona-server-mongodb-operator.txt ++ : + count=0 + [[ 0 != 0 ]] + echo + destroy demand-backup-13973 + local namespace=demand-backup-13973 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_backups + desc 'Delete psmdb-backup' + set +o xtrace ----------------------------------------------------------------------------------- Delete psmdb-backup ----------------------------------------------------------------------------------- ++ kubectl_bin get psmdb-backup --no-headers ++ wc -l +++ mktemp ++ local LAST_OUT=/tmp/tmp.ns7TUW7S5Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.mIRWlGAOWq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup --no-headers ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ns7TUW7S5Y ++ cat /tmp/tmp.mIRWlGAOWq No resources found in demand-backup-13973 namespace. 
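The password checks above grep every saved pod/operator log file for each test credential in plaintext, URL-encoded, and base64 form, and the loop only trips if any count is non-zero. A minimal sketch of that check, assuming an illustrative $log_file variable (the real suite iterates over the files written under the temporary logs_output-* directory):

    # test credentials, including the URL-encoded form of the backup password
    passwords='backup123456# backup123456%23 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456'
    encoded=$(for p in $passwords; do printf '%s' "$p" | base64; done)
    for pass in $passwords $encoded; do
        # grep -c exits 1 when nothing matches, hence the "|| :" fallback seen in the trace
        count=$(grep -c --fixed-strings -- "$pass" "$log_file" || :)
        if [[ $count != 0 ]]; then
            echo "credential '$pass' leaked into $log_file"
            exit 1
        fi
    done

In this run every count is 0, i.e. none of the credentials appear in the collected minio, psmdb-client, or operator logs.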
++ rm /tmp/tmp.ns7TUW7S5Y /tmp/tmp.mIRWlGAOWq ++ return 0 + '[' 0 '!=' 0 ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.QJWUO7SGX1 ++ mktemp + local LAST_ERR=/tmp/tmp.Uv7fU89yjz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.QJWUO7SGX1 customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.Uv7fU89yjz + rm /tmp/tmp.QJWUO7SGX1 /tmp/tmp.Uv7fU89yjz + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.MChXcEcRzm ++ mktemp + local LAST_ERR=/tmp/tmp.e8hsmPZrZr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MChXcEcRzm + cat /tmp/tmp.e8hsmPZrZr + rm /tmp/tmp.MChXcEcRzm /tmp/tmp.e8hsmPZrZr + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.k1RLBDR1YE ++ mktemp + local LAST_ERR=/tmp/tmp.etkLxAKzCI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.k1RLBDR1YE 
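The delete_crd step running here (and continuing below for the restore and psmdb CRDs) first deletes the CRDs without waiting, then strips finalizers from any leftover custom resources so the CRDs can actually disappear, and finally waits for each CRD to be gone. A simplified reconstruction of that pattern, with ${src_dir} standing for the operator checkout as in the trace:

    kubectl delete -f "${src_dir}/deploy/crd.yaml" --ignore-not-found --wait=false
    for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-'); do
        # clear finalizers on every remaining CR of this kind (xargs gets "namespace name" per line);
        # failures such as "the server doesn't have a resource type" are tolerated, as in the log
        kubectl get "$crd_name" --all-namespaces -o wide \
            | grep -v NAMESPACE \
            | xargs -L 1 sh -xc 'kubectl patch '"$crd_name"' -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' \
            || :
        kubectl wait --for=delete crd "$crd_name"
    done

Because the CRDs were already removed at the start of the test, the patch attempts here fail with "resource type not found" and are simply ignored.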
+ cat /tmp/tmp.etkLxAKzCI + rm /tmp/tmp.k1RLBDR1YE /tmp/tmp.etkLxAKzCI + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.QLLQpUlo21 ++ mktemp + local LAST_ERR=/tmp/tmp.xgmHeE2Y24 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.QLLQpUlo21 + cat /tmp/tmp.xgmHeE2Y24 + rm /tmp/tmp.QLLQpUlo21 /tmp/tmp.xgmHeE2Y24 + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.73uFH3j7B1 ++ mktemp + local LAST_ERR=/tmp/tmp.hfR3IHZ3vf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1912/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.73uFH3j7B1 clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.hfR3IHZ3vf + rm /tmp/tmp.73uFH3j7B1 /tmp/tmp.hfR3IHZ3vf + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.GFrrSQYxDe ++ mktemp + local LAST_ERR=/tmp/tmp.viymoWv5Wa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.GFrrSQYxDe + cat /tmp/tmp.viymoWv5Wa Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.GFrrSQYxDe + cat /tmp/tmp.viymoWv5Wa Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not 
found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server 
(NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 4 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat 
/tmp/tmp.GFrrSQYxDe + cat /tmp/tmp.viymoWv5Wa Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server 
(NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io 
"cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 8 + cat /tmp/tmp.GFrrSQYxDe + cat /tmp/tmp.viymoWv5Wa Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io 
"cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": 
deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.17.2/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + rm /tmp/tmp.GFrrSQYxDe /tmp/tmp.viymoWv5Wa + return 1 + true + '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace demand-backup-13973 + rm -rf /tmp/tmp.O5rEQupLX6 + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.lvNzAsnp92 + desc 'test passed' + local LAST_OUT=/tmp/tmp.s88q6G5dJe + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.5wKFL30xR8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.i5qjoGOjzC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace demand-backup-13973