Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/logs/demand-backup.log WARNING: version difference between client (1.30) and server (1.27) exceeds the supported minor version skew of +/-1 + create_infra demand-backup-29329 + local ns=demand-backup-29329 + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.7Btc4TGCgB ++ mktemp + local LAST_ERR=/tmp/tmp.eliXhLnOj0 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7Btc4TGCgB customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.eliXhLnOj0 + rm /tmp/tmp.7Btc4TGCgB /tmp/tmp.eliXhLnOj0 + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n demand-backup-13289 backup-minio --type=merge -p '{"metadata":{"finalizers":[]}}' perconaservermongodbbackup.psmdb.percona.com/backup-minio patched + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.6csr713Sbv ++ mktemp + local LAST_ERR=/tmp/tmp.x58jLjOj8J + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6csr713Sbv customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com condition met + cat /tmp/tmp.x58jLjOj8J + rm /tmp/tmp.6csr713Sbv /tmp/tmp.x58jLjOj8J + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a 
resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.1tywqC4Dcw ++ mktemp + local LAST_ERR=/tmp/tmp.vNesfXMhGA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1tywqC4Dcw + cat /tmp/tmp.vNesfXMhGA + rm /tmp/tmp.1tywqC4Dcw /tmp/tmp.vNesfXMhGA + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.n9kR4tMoRF ++ mktemp + local LAST_ERR=/tmp/tmp.7D76hyd6cP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.n9kR4tMoRF + cat /tmp/tmp.7D76hyd6cP + rm /tmp/tmp.n9kR4tMoRF /tmp/tmp.7D76hyd6cP + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.zzCxoObwkF ++ mktemp + local LAST_ERR=/tmp/tmp.kvT10AeTf2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zzCxoObwkF clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.kvT10AeTf2 + rm /tmp/tmp.zzCxoObwkF /tmp/tmp.kvT10AeTf2 + return 0 + check_crd_for_deletion PR-1612-57a92fde + local git_tag=PR-1612-57a92fde ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1612-57a92fde/deploy/crd.yaml ++ /usr/bin/sed ':a;N;$!ba;s/\n/ /g' ++ /usr/bin/sed s/---//g ++ yq eval .metadata.name + for crd_name in '$(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '\''.metadata.name'\'' | $sed '\''s/---//g'\'' | $sed '\'':a;N;$!ba;s/\n/ /g'\'')' ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r2bwFnNuSk +++ mktemp ++ local LAST_ERR=/tmp/tmp.6FU6srGpkt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.r2bwFnNuSk ++ cat /tmp/tmp.6FU6srGpkt Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in '$(seq 0 
2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.r2bwFnNuSk ++ cat /tmp/tmp.6FU6srGpkt Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.r2bwFnNuSk ++ cat /tmp/tmp.6FU6srGpkt Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.r2bwFnNuSk ++ cat /tmp/tmp.6FU6srGpkt Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.r2bwFnNuSk /tmp/tmp.6FU6srGpkt ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' ++ tail -n1 + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.MHa1q5nAaw + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.CjV7QCBc64 ++ mktemp + local LAST_ERR=/tmp/tmp.UIOhhXL3AJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + awk '{print$1}' + egrep -v 
'^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + local LAST_ERR=/tmp/tmp.2AXpZgpvC4 + local exit_status=0 + local timeout=4 + xargs kubectl delete ns ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CjV7QCBc64 + cat /tmp/tmp.UIOhhXL3AJ + rm /tmp/tmp.CjV7QCBc64 /tmp/tmp.UIOhhXL3AJ + return 0 namespace "demand-backup-13289" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MHa1q5nAaw namespace "psmdb-operator" deleted + cat /tmp/tmp.2AXpZgpvC4 + rm /tmp/tmp.MHa1q5nAaw /tmp/tmp.2AXpZgpvC4 + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.CxNeDyAftN ++ mktemp + local LAST_ERR=/tmp/tmp.tTByKomuxZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CxNeDyAftN + cat /tmp/tmp.tTByKomuxZ + rm /tmp/tmp.CxNeDyAftN /tmp/tmp.tTByKomuxZ + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.af6BNFN1tF ++ mktemp + local LAST_ERR=/tmp/tmp.TkFM0MPVot + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.af6BNFN1tF namespace/psmdb-operator created + cat /tmp/tmp.TkFM0MPVot + rm /tmp/tmp.af6BNFN1tF /tmp/tmp.TkFM0MPVot + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.wDHh8oGOQ5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZGTYbvJzSB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wDHh8oGOQ5 ++ cat /tmp/tmp.ZGTYbvJzSB ++ rm /tmp/tmp.wDHh8oGOQ5 /tmp/tmp.ZGTYbvJzSB ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1612-57a92fde-7-cluster8 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.AsrKqKPNym ++ mktemp + local LAST_ERR=/tmp/tmp.ZstoT5tUrg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1612-57a92fde-7-cluster8 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AsrKqKPNym Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1612-57a92fde-7-cluster8" modified. 
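The mktemp / LAST_OUT / LAST_ERR / "seq 0 2" pattern that repeats throughout this trace is the suite's kubectl_bin wrapper: it captures stdout and stderr to temp files and retries a failed kubectl up to three times with a growing back-off. A minimal sketch reconstructed from the trace alone, assuming the real helper in e2e-tests/functions looks roughly like this (its actual retry condition is slightly more involved):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 timeout=4
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                # dump what was captured, then back off: the sleep 0 / sleep 4 / sleep 8 seen above
                cat "$LAST_OUT"
                cat "$LAST_ERR" >&2
                sleep $((timeout * i))
            else
                break
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm -f "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }

The stray errors during CRD cleanup (e.g. "kubectl patch ... -n sh") also follow from the trace: when the preceding kubectl get returns nothing, xargs still runs its sh -xc payload once with no arguments, so the $0 that should carry a namespace falls back to the shell name "sh"; the script tolerates the resulting failure with the ":" no-op that follows each error.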
+ cat /tmp/tmp.ZstoT5tUrg + rm /tmp/tmp.AsrKqKPNym /tmp/tmp.ZstoT5tUrg + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.wMSthSmBKt ++ mktemp + local LAST_ERR=/tmp/tmp.FohnjPNVdE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wMSthSmBKt customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.FohnjPNVdE + rm /tmp/tmp.wMSthSmBKt /tmp/tmp.FohnjPNVdE + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: psmdb-operator^' + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.muHn7KmcuA ++ mktemp + local LAST_ERR=/tmp/tmp.H6Fe7dAkze + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.muHn7KmcuA clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.H6Fe7dAkze + rm /tmp/tmp.muHn7KmcuA /tmp/tmp.H6Fe7dAkze + return 0 + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1612-57a92fde") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/deploy/cw-operator.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.XJJ5Ox7qVz ++ mktemp + local LAST_ERR=/tmp/tmp.20jc8qYI6b + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XJJ5Ox7qVz deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.20jc8qYI6b + rm /tmp/tmp.XJJ5Ox7qVz /tmp/tmp.20jc8qYI6b + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.8H6hquvfMX +++ mktemp ++ local LAST_ERR=/tmp/tmp.E90XvRUmb6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8H6hquvfMX ++ cat /tmp/tmp.E90XvRUmb6 ++ rm /tmp/tmp.8H6hquvfMX /tmp/tmp.E90XvRUmb6 ++ return 0 + wait_pod percona-server-mongodb-operator-bdc5b774b-jtgt4 + local pod=percona-server-mongodb-operator-bdc5b774b-jtgt4 + set +o xtrace waiting for pod/percona-server-mongodb-operator-bdc5b774b-jtgt4 to be ready.OK + create_namespace demand-backup-29329 + local namespace=demand-backup-29329 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl api-resources ++ grep chaos-mesh ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + awk '{print$1}' + '[' -n '' 
']' + xargs kubectl delete ns + desc 'cleaned up old namespaces demand-backup-29329' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces demand-backup-29329 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace demand-backup-29329 --ignore-not-found ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.EllYf2agtU + local LAST_OUT=/tmp/tmp.6zIZ6Ok5fT ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.eGsEx4UuCN + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.G4dLdYRlOU + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace demand-backup-29329 --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6zIZ6Ok5fT + cat /tmp/tmp.eGsEx4UuCN + rm /tmp/tmp.6zIZ6Ok5fT /tmp/tmp.eGsEx4UuCN + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EllYf2agtU + cat /tmp/tmp.G4dLdYRlOU + rm /tmp/tmp.EllYf2agtU /tmp/tmp.G4dLdYRlOU + return 0 + kubectl_bin wait --for=delete namespace demand-backup-29329 ++ mktemp + local LAST_OUT=/tmp/tmp.ljJjjZjKFq ++ mktemp + local LAST_ERR=/tmp/tmp.Mcqap4nCfx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace demand-backup-29329 namespace "gmp-public" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ljJjjZjKFq + cat /tmp/tmp.Mcqap4nCfx + rm /tmp/tmp.ljJjjZjKFq /tmp/tmp.Mcqap4nCfx + return 0 + desc 'create namespace demand-backup-29329' + set +o xtrace ----------------------------------------------------------------------------------- create namespace demand-backup-29329 ----------------------------------------------------------------------------------- + kubectl_bin create namespace demand-backup-29329 ++ mktemp + local LAST_OUT=/tmp/tmp.O9Jf7Fnz1P ++ mktemp + local LAST_ERR=/tmp/tmp.KAtyNu2ZFj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace demand-backup-29329 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.O9Jf7Fnz1P namespace/demand-backup-29329 created + cat /tmp/tmp.KAtyNu2ZFj + rm /tmp/tmp.O9Jf7Fnz1P /tmp/tmp.KAtyNu2ZFj + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.Rr6pRG0hSH +++ mktemp ++ local LAST_ERR=/tmp/tmp.9xX6sHEx4m ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Rr6pRG0hSH ++ cat /tmp/tmp.9xX6sHEx4m ++ rm /tmp/tmp.Rr6pRG0hSH /tmp/tmp.9xX6sHEx4m ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1612-57a92fde-7-cluster8 --namespace=demand-backup-29329 ++ mktemp + local LAST_OUT=/tmp/tmp.x1y04gVnqh ++ mktemp + local LAST_ERR=/tmp/tmp.NG5Q81qOKr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1612-57a92fde-7-cluster8 --namespace=demand-backup-29329 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.x1y04gVnqh Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1612-57a92fde-7-cluster8" modified. 
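A few steps back, the operator Deployment was not applied verbatim: deploy/cw-operator.yaml is rewritten in-flight with yq (v4 syntax) to point at the PR build and to force two environment variables, then piped straight to kubectl. Reflowed from the trace for readability, assuming $src_dir is the same checkout root used elsewhere in this log:

    # pin the operator image to the PR build, set DISABLE_TELEMETRY=true and
    # LOG_LEVEL=DEBUG in the container env, then apply without a temp file
    yq eval '
      (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1612-57a92fde") |
      ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") |
      ((.. | select(.[] == "LOG_LEVEL")) |= .value="DEBUG")
    ' "${src_dir}/deploy/cw-operator.yaml" | kubectl apply -f -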
+ cat /tmp/tmp.NG5Q81qOKr + rm /tmp/tmp.x1y04gVnqh /tmp/tmp.NG5Q81qOKr + return 0 + deploy_minio + desc 'install Minio' + set +o xtrace ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- + helm uninstall minio-service Error: uninstall: Release not loaded: minio-service: release: not found + : + helm repo remove minio "minio" has been removed from your repositories + helm repo add minio https://charts.min.io/ "minio" has been added to your repositories + retry 10 60 helm install minio-service --version 5.0.14 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio + local max=10 + local delay=60 + shift 2 + local n=1 + helm install minio-service --version 5.0.14 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio NAME: minio-service LAST DEPLOYED: Wed Jul 31 14:58:11 2024 NAMESPACE: demand-backup-29329 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.demand-backup-29329.svc.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace demand-backup-29329 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. kubectl port-forward $POD_NAME 9000 --namespace demand-backup-29329 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace demand-backup-29329 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace demand-backup-29329 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. 
mc ls minio-service-local ++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lTjUqmJvjQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.TKEpDrOzG2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.lTjUqmJvjQ ++ cat /tmp/tmp.TKEpDrOzG2 ++ rm /tmp/tmp.lTjUqmJvjQ /tmp/tmp.TKEpDrOzG2 ++ return 0 + MINIO_POD=minio-service-6ff7647778-4ck4c + wait_pod minio-service-6ff7647778-4ck4c + local pod=minio-service-6ff7647778-4ck4c + set +o xtrace waiting for pod/minio-service-6ff7647778-4ck4c to be ready.OK + '[' -n psmdb-operator ']' + kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.demand-backup-29329.svc.cluster.local --tcp=9000 ++ mktemp + local LAST_OUT=/tmp/tmp.Xu8sxIR7Yx ++ mktemp + local LAST_ERR=/tmp/tmp.8uBc18n15m + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create svc -n psmdb-operator externalname minio-service --external-name=minio-service.demand-backup-29329.svc.cluster.local --tcp=9000 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Xu8sxIR7Yx service/minio-service created + cat /tmp/tmp.8uBc18n15m + rm /tmp/tmp.Xu8sxIR7Yx /tmp/tmp.8uBc18n15m + return 0 + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 \ /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' ++ mktemp + local LAST_OUT=/tmp/tmp.8AQ8Fy7iIP ++ mktemp + local LAST_ERR=/tmp/tmp.2nX3xOzyMw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 \ /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8AQ8Fy7iIP make_bucket: operator-testing pod "aws-cli" deleted + cat /tmp/tmp.2nX3xOzyMw If you don't see a command prompt, try pressing enter. 
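The "make_bucket: operator-testing" line above is the success marker for the S3 bucket the backups will target; the attach warning on the next line is benign (kubectl run -i lost the race to attach and fell back to streaming the pod's logs). To inspect the bucket the same way the test created it, a throwaway pod with the same image, credentials, and endpoint works; the s3 ls call here is an illustrative follow-up, not part of the test:

    kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- \
        bash -c 'AWS_ACCESS_KEY_ID=some-access-key \
                 AWS_SECRET_ACCESS_KEY=some-secret-key \
                 AWS_DEFAULT_REGION=us-east-1 \
                 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing'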
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-29329 + rm /tmp/tmp.8AQ8Fy7iIP /tmp/tmp.2nX3xOzyMw + return 0 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + cluster=some-name-rs0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.qbicNlkuUo ++ mktemp + local LAST_ERR=/tmp/tmp.LQJcl2l0Vy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qbicNlkuUo secret/some-users created deployment.apps/psmdb-client created + cat /tmp/tmp.LQJcl2l0Vy + rm /tmp/tmp.qbicNlkuUo /tmp/tmp.LQJcl2l0Vy + return 0 + apply_s3_storage_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.5Su6vwaK8v ++ mktemp + local LAST_ERR=/tmp/tmp.WiqthtWROn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5Su6vwaK8v secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.WiqthtWROn + rm /tmp/tmp.5Su6vwaK8v /tmp/tmp.WiqthtWROn + return 0 + desc 'create first PSMDB cluster some-name-rs0' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster some-name-rs0 ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/some-name-rs0.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/some-name-rs0.yml + yq eval '.spec.upgradeOptions.apply="Never"' + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1612-57a92fde"' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.HbF74fbMjk + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' ++ mktemp + local LAST_ERR=/tmp/tmp.9PtUCzGLda + local exit_status=0 + local timeout=4 + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + cat 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/some-name-rs0.yml + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.HbF74fbMjk perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.9PtUCzGLda + rm /tmp/tmp.HbF74fbMjk /tmp/tmp.9PtUCzGLda + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready..................OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready.......OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.srW4D9lNpz +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZVatpRAC3q ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.srW4D9lNpz ++ cat /tmp/tmp.ZVatpRAC3q ++ rm /tmp/tmp.srW4D9lNpz /tmp/tmp.ZVatpRAC3q ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready....OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KWzutEwMtN +++ mktemp ++ local LAST_ERR=/tmp/tmp.f7xwwu01MZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.KWzutEwMtN ++ cat /tmp/tmp.f7xwwu01MZ ++ rm /tmp/tmp.KWzutEwMtN /tmp/tmp.f7xwwu01MZ ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness..... + desc 'check if service and statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/some-name-rs0 + local resource=statefulset/some-name-rs0 + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/statefulset_some-name-rs0.yml + local new_result=/tmp/tmp.11cGdhxzv9/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/statefulset_some-name-rs0-oc.yml ']' + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. 
| select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("demand-backup-29329", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - + kubectl_bin get -o yaml statefulset/some-name-rs0 ++ mktemp + local LAST_OUT=/tmp/tmp.EQ9IT6lKKG ++ mktemp + local LAST_ERR=/tmp/tmp.CF55XAbY2r + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EQ9IT6lKKG + cat /tmp/tmp.CF55XAbY2r + rm /tmp/tmp.EQ9IT6lKKG /tmp/tmp.CF55XAbY2r + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.11cGdhxzv9/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.27 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.11cGdhxzv9/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.11cGdhxzv9/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/statefulset_some-name-rs0.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/statefulset_some-name-rs0.yml /tmp/tmp.11cGdhxzv9/statefulset_some-name-rs0.yml + desc 'create user' + set +o xtrace ----------------------------------------------------------------------------------- create user ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-rs0.demand-backup-29329 + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.demand-backup-29329 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qel3I386ji +++ mktemp ++ local LAST_ERR=/tmp/tmp.ivNsrAXAQT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Qel3I386ji ++ cat /tmp/tmp.ivNsrAXAQT ++ rm /tmp/tmp.Qel3I386ji /tmp/tmp.ivNsrAXAQT ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.wzjzcW7Vj8 ++ mktemp + local LAST_ERR=/tmp/tmp.dGLeH3EuRz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wzjzcW7Vj8 Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("7764d7da-a0f8-4538-889b-053ddfdc2263") } Percona Server for MongoDB server version: v7.0.12-7 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.dGLeH3EuRz + rm /tmp/tmp.wzjzcW7Vj8 /tmp/tmp.dGLeH3EuRz + return 0 + sleep 2 + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.demand-backup-29329 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bck6gmLgxz +++ mktemp ++ local LAST_ERR=/tmp/tmp.iwj3vYmHFi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.bck6gmLgxz ++ cat /tmp/tmp.iwj3vYmHFi ++ rm /tmp/tmp.bck6gmLgxz /tmp/tmp.iwj3vYmHFi ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.ieYmUoFIUe ++ mktemp + local LAST_ERR=/tmp/tmp.TOlxnvB87m + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ieYmUoFIUe Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("48bb4883-8b82-4414-9d13-99e8025f1ae3") } Percona Server for MongoDB server version: v7.0.12-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.TOlxnvB87m + rm /tmp/tmp.ieYmUoFIUe /tmp/tmp.TOlxnvB87m + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local command=find + local 
uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp\n db.test.find()' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rqxI5RF61a +++ mktemp ++ local LAST_ERR=/tmp/tmp.PG6XvP0D10 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rqxI5RF61a ++ cat /tmp/tmp.PG6XvP0D10 ++ rm /tmp/tmp.rqxI5RF61a /tmp/tmp.PG6XvP0D10 ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.sz7lz500gY ++ mktemp + local LAST_ERR=/tmp/tmp.jCjfApxcuI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sz7lz500gY + cat /tmp/tmp.jCjfApxcuI + rm /tmp/tmp.sz7lz500gY /tmp/tmp.jCjfApxcuI + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ckG85JrCne +++ mktemp ++ local LAST_ERR=/tmp/tmp.G4NggFvYZf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ckG85JrCne ++ cat /tmp/tmp.G4NggFvYZf ++ rm /tmp/tmp.ckG85JrCne /tmp/tmp.G4NggFvYZf ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.GjMgFxT6kt ++ mktemp + local LAST_ERR=/tmp/tmp.6Gk3ZhF0Qz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GjMgFxT6kt + cat /tmp/tmp.6Gk3ZhF0Qz + rm /tmp/tmp.GjMgFxT6kt /tmp/tmp.6Gk3ZhF0Qz + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t92aP6lkKx +++ mktemp ++ local LAST_ERR=/tmp/tmp.2YXLkdPfRc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.t92aP6lkKx ++ cat /tmp/tmp.2YXLkdPfRc ++ rm /tmp/tmp.t92aP6lkKx /tmp/tmp.2YXLkdPfRc ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.o8dVRHhbbg ++ mktemp + local LAST_ERR=/tmp/tmp.APfn4LxOdw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + 
set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.o8dVRHhbbg + cat /tmp/tmp.APfn4LxOdw + rm /tmp/tmp.o8dVRHhbbg /tmp/tmp.APfn4LxOdw + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + wait_backup_agent some-name-rs0-0 + local agent_pod=some-name-rs0-0 + set +o xtrace some-name-rs0-0 + wait_backup_agent some-name-rs0-1 + local agent_pod=some-name-rs0-1 + set +o xtrace some-name-rs0-1 + wait_backup_agent some-name-rs0-2 + local agent_pod=some-name-rs0-2 + set +o xtrace some-name-rs0-2 + backup_name_minio=backup-minio + '[' -z '' ']' + backup_name_aws=backup-aws-s3 + backup_name_gcp=backup-gcp-cs + backup_name_azure=backup-azure-blob + desc 'run backups' + set +o xtrace ----------------------------------------------------------------------------------- run backups ----------------------------------------------------------------------------------- + '[' -z '' ']' + run_backup aws-s3 + local storage=aws-s3 + local backup_name=backup-aws-s3 + desc 'run backup backup-aws-s3' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-aws-s3 ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-aws-s3" | .spec.storageName = "aws-s3"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/backup-aws-s3.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.ePTvBz7OqR ++ mktemp + local LAST_ERR=/tmp/tmp.18Cga8QTRa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ePTvBz7OqR perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created + cat /tmp/tmp.18Cga8QTRa + rm /tmp/tmp.ePTvBz7OqR /tmp/tmp.18Cga8QTRa + return 0 + run_backup gcp-cs + local storage=gcp-cs + local backup_name=backup-gcp-cs + desc 'run backup backup-gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-gcp-cs ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-gcp-cs" | .spec.storageName = "gcp-cs"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/backup-gcp-cs.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.PkNMkkXf3D ++ mktemp + local LAST_ERR=/tmp/tmp.0vndfGQqHZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PkNMkkXf3D perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs created + cat /tmp/tmp.0vndfGQqHZ + rm /tmp/tmp.PkNMkkXf3D /tmp/tmp.0vndfGQqHZ + return 0 + run_backup azure-blob + local storage=azure-blob + local backup_name=backup-azure-blob + desc 'run backup backup-azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-azure-blob ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-azure-blob" | .spec.storageName = "azure-blob"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/backup-azure-blob.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.sDVYaKZNbT ++ mktemp + local LAST_ERR=/tmp/tmp.kqoO9rJAYh + local exit_status=0 + local 
timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sDVYaKZNbT perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created + cat /tmp/tmp.kqoO9rJAYh + rm /tmp/tmp.sDVYaKZNbT /tmp/tmp.kqoO9rJAYh + return 0 + run_backup minio + local storage=minio + local backup_name=backup-minio + desc 'run backup backup-minio' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-minio" | .spec.storageName = "minio"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/backup-minio.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.0fSr9nasgO ++ mktemp + local LAST_ERR=/tmp/tmp.5KouZnYhwL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0fSr9nasgO perconaservermongodbbackup.psmdb.percona.com/backup-minio created + cat /tmp/tmp.5KouZnYhwL + rm /tmp/tmp.0fSr9nasgO /tmp/tmp.5KouZnYhwL + return 0 + '[' -z '' ']' + wait_backup backup-aws-s3 + local backup_name=backup-aws-s3 + set +o xtrace backup-aws-s3............ + wait_backup backup-gcp-cs + local backup_name=backup-gcp-cs + set +o xtrace backup-gcp-cs.............. + wait_backup backup-azure-blob + local backup_name=backup-azure-blob + set +o xtrace backup-azure-blob....................................... + wait_backup backup-minio + local backup_name=backup-minio + set +o xtrace backup-minio. + sleep 5 + '[' -z '' ']' + desc 'check backup and restore -- aws-s3' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- aws-s3 ----------------------------------------------------------------------------------- ++ get_backup_dest backup-aws-s3 ++ local backup_name=backup-aws-s3 ++ sed 's|azure://||' ++ sed 's|s3://||' ++ sed -e 's/.json$//' ++ kubectl_bin get psmdb-backup backup-aws-s3 -o 'jsonpath={.status.destination}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lcvQANyq21 +++ mktemp ++ local LAST_ERR=/tmp/tmp.EHIH2AQcnk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-aws-s3 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.lcvQANyq21 ++ cat /tmp/tmp.EHIH2AQcnk ++ rm /tmp/tmp.lcvQANyq21 /tmp/tmp.EHIH2AQcnk ++ return 0 + backup_dest_aws=operator-testing/psmdb-demand-backup/2024-07-31T15:01:39Z + gunzip + curl -s https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2024-07-31T15:01:39Z/rs0/myApp.test.gz + run_recovery_check backup-aws-s3 some-name-rs0 + local backup=backup-aws-s3 + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-29329 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UOHZJkPld6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.EXYjTCIp7n ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UOHZJkPld6 ++ cat /tmp/tmp.EXYjTCIp7n ++ rm /tmp/tmp.UOHZJkPld6 /tmp/tmp.EXYjTCIp7n ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.AFIo4ZL9Qy ++ mktemp + local LAST_ERR=/tmp/tmp.oefY8gRPeQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AFIo4ZL9Qy Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("863aa12b-0466-4206-8afa-72fe1b7e19c7") } Percona Server for MongoDB server version: v7.0.12-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.oefY8gRPeQ + rm /tmp/tmp.AFIo4ZL9Qy /tmp/tmp.oefY8gRPeQ + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-29329 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-29329 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KBz8lT0kji +++ mktemp ++ local LAST_ERR=/tmp/tmp.BTRyX2WaN2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.KBz8lT0kji ++ cat /tmp/tmp.BTRyX2WaN2 ++ rm /tmp/tmp.KBz8lT0kji /tmp/tmp.BTRyX2WaN2 ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.eQ1nDxVjkU ++ mktemp + local LAST_ERR=/tmp/tmp.AbKY7OGTlT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eQ1nDxVjkU + cat /tmp/tmp.AbKY7OGTlT + rm /tmp/tmp.eQ1nDxVjkU /tmp/tmp.AbKY7OGTlT + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.11cGdhxzv9/find-2nd + run_restore backup-aws-s3 + local backup_name=backup-aws-s3 + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/restore.yml + /usr/bin/sed -e 's/backupName:/backupName: backup-aws-s3/' + /usr/bin/sed -e 's/name:/name: restore-backup-aws-s3/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.EYi1zq27xI ++ mktemp + local LAST_ERR=/tmp/tmp.j1FPYQIOkX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EYi1zq27xI perconaservermongodbrestore.psmdb.percona.com/restore-backup-aws-s3 created + cat /tmp/tmp.j1FPYQIOkX + rm /tmp/tmp.EYi1zq27xI /tmp/tmp.j1FPYQIOkX + return 0 + wait_restore backup-aws-s3 some-name + local backup_name=backup-aws-s3 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=780 + set +o xtrace waiting psmdb-restore/backup-aws-s3 to reach ready state............. 
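-----------------------------------------------------------------------------------
sketch: the kubectl_bin retry wrapper (reconstructed from the trace above)
-----------------------------------------------------------------------------------
Every kubectl invocation in this log runs through a kubectl_bin helper that buffers stdout and stderr into mktemp files, retries the command up to three times, then replays the buffers and cleans up. A minimal sketch, reconstructed from the repeated LAST_OUT/LAST_ERR/seq 0 2 pattern in the trace; the failure branch and the back-off sleep are assumptions, and the real helper in the repository's shared test functions may differ in detail:

kubectl_bin() {
    local LAST_OUT LAST_ERR
    LAST_OUT="$(mktemp)"
    LAST_ERR="$(mktemp)"
    local exit_status=0
    local timeout=4
    for i in $(seq 0 2); do                        # up to three attempts
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" -ne 0 ]; then
            sleep "$timeout"                       # assumed back-off before the next attempt
        else
            break                                  # success: stop retrying
        fi
    done
    cat "$LAST_OUT"                                # replay captured output, as seen in the log
    cat "$LAST_ERR"
    rm -f "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}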
+ '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vg6FJV8ha2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.8TmuMo64og ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vg6FJV8ha2 ++ cat /tmp/tmp.8TmuMo64og ++ rm /tmp/tmp.vg6FJV8ha2 /tmp/tmp.8TmuMo64og ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.abr1tlBIze +++ mktemp ++ local LAST_ERR=/tmp/tmp.wLI4aikbHx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.abr1tlBIze ++ cat /tmp/tmp.wLI4aikbHx ++ rm /tmp/tmp.abr1tlBIze /tmp/tmp.wLI4aikbHx ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.BxSpb0trzM ++ mktemp + local LAST_ERR=/tmp/tmp.8QeHA93mxX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.BxSpb0trzM + cat /tmp/tmp.8QeHA93mxX + rm /tmp/tmp.BxSpb0trzM /tmp/tmp.8QeHA93mxX + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local 
command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mb0k4u2B9t +++ mktemp ++ local LAST_ERR=/tmp/tmp.Nme8F57v9T ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mb0k4u2B9t ++ cat /tmp/tmp.Nme8F57v9T ++ rm /tmp/tmp.mb0k4u2B9t /tmp/tmp.Nme8F57v9T ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Nc890xyS2E ++ mktemp + local LAST_ERR=/tmp/tmp.EwbF44s3b5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Nc890xyS2E + cat /tmp/tmp.EwbF44s3b5 + rm /tmp/tmp.Nc890xyS2E /tmp/tmp.EwbF44s3b5 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k4nhfzITNO +++ mktemp ++ local LAST_ERR=/tmp/tmp.8FYBmfasqx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e 
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.k4nhfzITNO ++ cat /tmp/tmp.8FYBmfasqx ++ rm /tmp/tmp.k4nhfzITNO /tmp/tmp.8FYBmfasqx ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.fH7j49SvCP ++ mktemp + local LAST_ERR=/tmp/tmp.DhHFtUMOFq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fH7j49SvCP + cat /tmp/tmp.DhHFtUMOFq + rm /tmp/tmp.fH7j49SvCP /tmp/tmp.DhHFtUMOFq + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + desc 'check backup and restore -- gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- gcp-cs ----------------------------------------------------------------------------------- ++ get_backup_dest backup-gcp-cs ++ local backup_name=backup-gcp-cs ++ kubectl_bin get psmdb-backup backup-gcp-cs -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' +++ mktemp ++ sed 's|azure://||' ++ local LAST_OUT=/tmp/tmp.RqhXa2QxoR +++ mktemp ++ local LAST_ERR=/tmp/tmp.400lsD6zHW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-gcp-cs -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RqhXa2QxoR ++ cat /tmp/tmp.400lsD6zHW ++ rm /tmp/tmp.RqhXa2QxoR /tmp/tmp.400lsD6zHW ++ return 0 + backup_dest_gcp=operator-testing/psmdb-demand-backup/2024-07-31T15:02:11Z + curl -s https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2024-07-31T15:02:11Z/rs0/myApp.test.gz + gunzip + run_recovery_check backup-gcp-cs some-name-rs0 + local backup=backup-gcp-cs + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-29329 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3O3zyeBbKK +++ mktemp ++ local LAST_ERR=/tmp/tmp.B068HWiQkw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3O3zyeBbKK ++ cat /tmp/tmp.B068HWiQkw ++ rm /tmp/tmp.3O3zyeBbKK /tmp/tmp.B068HWiQkw ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ 
myApp:myPass@some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.SF1tX1q42F ++ mktemp + local LAST_ERR=/tmp/tmp.6ITVV08zYZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SF1tX1q42F Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("15f241c8-c479-49a0-8bc1-9211a475a933") } Percona Server for MongoDB server version: v7.0.12-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.6ITVV08zYZ + rm /tmp/tmp.SF1tX1q42F /tmp/tmp.6ITVV08zYZ + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-29329 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-29329 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2SaUjCE0bS +++ mktemp ++ local LAST_ERR=/tmp/tmp.OxT28KFSAu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2SaUjCE0bS ++ cat /tmp/tmp.OxT28KFSAu ++ rm /tmp/tmp.2SaUjCE0bS /tmp/tmp.OxT28KFSAu ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.vsUxb1kNUY ++ mktemp + local LAST_ERR=/tmp/tmp.lb3svjr7Tx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n 
db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vsUxb1kNUY + cat /tmp/tmp.lb3svjr7Tx + rm /tmp/tmp.vsUxb1kNUY /tmp/tmp.lb3svjr7Tx + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.11cGdhxzv9/find-2nd + run_restore backup-gcp-cs + local backup_name=backup-gcp-cs + /usr/bin/sed -e 's/backupName:/backupName: backup-gcp-cs/' + /usr/bin/sed -e 's/name:/name: restore-backup-gcp-cs/' + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/restore.yml ++ mktemp + local LAST_OUT=/tmp/tmp.Bnyt9CP7Cv ++ mktemp + local LAST_ERR=/tmp/tmp.E3ZKR5gZvi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Bnyt9CP7Cv perconaservermongodbrestore.psmdb.percona.com/restore-backup-gcp-cs created + cat /tmp/tmp.E3ZKR5gZvi + rm /tmp/tmp.Bnyt9CP7Cv /tmp/tmp.E3ZKR5gZvi + return 0 + wait_restore backup-gcp-cs some-name + local backup_name=backup-gcp-cs + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=780 + set +o xtrace waiting psmdb-restore/backup-gcp-cs to reach ready state............ + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iEy8KDiUuR +++ mktemp ++ local LAST_ERR=/tmp/tmp.njEqxwu5ag ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iEy8KDiUuR ++ cat /tmp/tmp.njEqxwu5ag ++ rm /tmp/tmp.iEy8KDiUuR /tmp/tmp.njEqxwu5ag ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZgF4V9WqCX +++ mktemp ++ local LAST_ERR=/tmp/tmp.5357xAfvTR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZgF4V9WqCX ++ cat /tmp/tmp.5357xAfvTR ++ rm /tmp/tmp.ZgF4V9WqCX /tmp/tmp.5357xAfvTR ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.eCDnCszdjF ++ mktemp + local LAST_ERR=/tmp/tmp.3qNHcnOlq7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eCDnCszdjF + cat /tmp/tmp.3qNHcnOlq7 + rm /tmp/tmp.eCDnCszdjF /tmp/tmp.3qNHcnOlq7 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VH9QpgPt2Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.pm80WD4k9t ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VH9QpgPt2Y ++ cat /tmp/tmp.pm80WD4k9t ++ rm /tmp/tmp.VH9QpgPt2Y /tmp/tmp.pm80WD4k9t ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.b4T4Amv8IK ++ mktemp + local LAST_ERR=/tmp/tmp.KbmXAtlndV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.b4T4Amv8IK + cat /tmp/tmp.KbmXAtlndV + rm /tmp/tmp.b4T4Amv8IK /tmp/tmp.KbmXAtlndV + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5DZn3zmXNy +++ mktemp ++ local LAST_ERR=/tmp/tmp.5i4d5MTOza ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5DZn3zmXNy ++ cat /tmp/tmp.5i4d5MTOza ++ rm /tmp/tmp.5DZn3zmXNy /tmp/tmp.5i4d5MTOza ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.PHIducpfsw ++ mktemp + local LAST_ERR=/tmp/tmp.FVtQKJcS3B + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PHIducpfsw + cat /tmp/tmp.FVtQKJcS3B + rm /tmp/tmp.PHIducpfsw /tmp/tmp.FVtQKJcS3B + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + desc 'check backup and restore -- azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- azure-blob ----------------------------------------------------------------------------------- ++ get_backup_dest backup-azure-blob ++ local backup_name=backup-azure-blob ++ kubectl_bin get psmdb-backup backup-azure-blob -o 'jsonpath={.status.destination}' ++ sed 's|s3://||' ++ sed 's|azure://||' ++ sed -e 's/.json$//' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4eXHWNpphP +++ mktemp ++ local LAST_ERR=/tmp/tmp.26RNwHb167 ++ local 
exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-azure-blob -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4eXHWNpphP ++ cat /tmp/tmp.26RNwHb167 ++ rm /tmp/tmp.4eXHWNpphP /tmp/tmp.26RNwHb167 ++ return 0 + backup_dest_azure=https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2024-07-31T15:03:35Z + curl -s https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2024-07-31T15:03:35Z/rs0/myApp.test.gz + gunzip + run_recovery_check backup-azure-blob some-name-rs0 + local backup=backup-azure-blob + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-29329 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6TvjqcYXVn +++ mktemp ++ local LAST_ERR=/tmp/tmp.7JXY2zWCTe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6TvjqcYXVn ++ cat /tmp/tmp.7JXY2zWCTe ++ rm /tmp/tmp.6TvjqcYXVn /tmp/tmp.7JXY2zWCTe ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.xob17dBgb7 ++ mktemp + local LAST_ERR=/tmp/tmp.Wz0Ynf6PNr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xob17dBgb7 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("69eccd65-a168-4c87-89c3-95eeafcc7a62") } Percona Server for MongoDB server version: v7.0.12-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.Wz0Ynf6PNr + rm /tmp/tmp.xob17dBgb7 /tmp/tmp.Wz0Ynf6PNr + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-29329 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-29329 mongodb .svc.cluster.local + 
local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YW621vgmR2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6lAAfayVO2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YW621vgmR2 ++ cat /tmp/tmp.6lAAfayVO2 ++ rm /tmp/tmp.YW621vgmR2 /tmp/tmp.6lAAfayVO2 ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.rYuYlEJ6YC ++ mktemp + local LAST_ERR=/tmp/tmp.v2tW7o0WHE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rYuYlEJ6YC + cat /tmp/tmp.v2tW7o0WHE + rm /tmp/tmp.rYuYlEJ6YC /tmp/tmp.v2tW7o0WHE + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.11cGdhxzv9/find-2nd + run_restore backup-azure-blob + local backup_name=backup-azure-blob + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/restore.yml + /usr/bin/sed -e 's/backupName:/backupName: backup-azure-blob/' + /usr/bin/sed -e 's/name:/name: restore-backup-azure-blob/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.mN4fSSC7iv ++ mktemp + local LAST_ERR=/tmp/tmp.ZPGdXPI2DY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mN4fSSC7iv perconaservermongodbrestore.psmdb.percona.com/restore-backup-azure-blob created + cat /tmp/tmp.ZPGdXPI2DY + rm /tmp/tmp.mN4fSSC7iv /tmp/tmp.ZPGdXPI2DY + return 0 + wait_restore backup-azure-blob some-name + local backup_name=backup-azure-blob + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=780 + set +o xtrace waiting psmdb-restore/backup-azure-blob to reach ready state............ 
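-----------------------------------------------------------------------------------
sketch: run_restore manifest templating (reconstructed from the trace above)
-----------------------------------------------------------------------------------
Each restore in this log is created the same way: the shared conf/restore.yml template is streamed through two sed substitutions that fill in the restore name and the backupName, and the result is piped into kubectl apply. A minimal sketch under those assumptions; the template path, the sed expressions, and the ${src_dir} variable are taken from the trace, while the function body around them is inferred:

run_restore() {
    local backup_name=$1
    # conf/restore.yml ships with empty "name:" and "backupName:" fields;
    # the sed calls inject "restore-<backup>" and "<backup>" respectively.
    cat "${src_dir}/e2e-tests/demand-backup/conf/restore.yml" \
        | /usr/bin/sed -e "s/name:/name: restore-${backup_name}/" \
        | /usr/bin/sed -e "s/backupName:/backupName: ${backup_name}/" \
        | kubectl_bin apply -f -
}

# e.g. run_restore backup-azure-blob yields
# perconaservermongodbrestore.psmdb.percona.com/restore-backup-azure-blob created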
+ '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SUVUtgDL2q +++ mktemp ++ local LAST_ERR=/tmp/tmp.EXyUewSZF8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SUVUtgDL2q ++ cat /tmp/tmp.EXyUewSZF8 ++ rm /tmp/tmp.SUVUtgDL2q /tmp/tmp.EXyUewSZF8 ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eirOrU7uEM +++ mktemp ++ local LAST_ERR=/tmp/tmp.pMpoWbG1ex ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eirOrU7uEM ++ cat /tmp/tmp.pMpoWbG1ex ++ rm /tmp/tmp.eirOrU7uEM /tmp/tmp.pMpoWbG1ex ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.tzJKd6Evvu ++ mktemp + local LAST_ERR=/tmp/tmp.aLkm5RKnog + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tzJKd6Evvu + cat /tmp/tmp.aLkm5RKnog + rm /tmp/tmp.tzJKd6Evvu /tmp/tmp.aLkm5RKnog + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local 
command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jwp9Yo6wI3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sR5ge1bkar ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.jwp9Yo6wI3 ++ cat /tmp/tmp.sR5ge1bkar ++ rm /tmp/tmp.jwp9Yo6wI3 /tmp/tmp.sR5ge1bkar ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.GLQK5ncgdX ++ mktemp + local LAST_ERR=/tmp/tmp.S1ZfUt682C + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GLQK5ncgdX + cat /tmp/tmp.S1ZfUt682C + rm /tmp/tmp.GLQK5ncgdX /tmp/tmp.S1ZfUt682C + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2SmkihUiPk +++ mktemp ++ local LAST_ERR=/tmp/tmp.DTgwY4zrI2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e 
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2SmkihUiPk ++ cat /tmp/tmp.DTgwY4zrI2 ++ rm /tmp/tmp.2SmkihUiPk /tmp/tmp.DTgwY4zrI2 ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.jJLIKY8ap7 ++ mktemp + local LAST_ERR=/tmp/tmp.NqEDWHYMMs + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jJLIKY8ap7 + cat /tmp/tmp.NqEDWHYMMs + rm /tmp/tmp.jJLIKY8ap7 /tmp/tmp.NqEDWHYMMs + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + desc 'check backup and restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- minio ----------------------------------------------------------------------------------- ++ get_backup_dest backup-minio ++ local backup_name=backup-minio ++ sed 's|s3://||' ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' +++ mktemp ++ sed 's|azure://||' ++ local LAST_OUT=/tmp/tmp.dtccqYOemh +++ mktemp ++ local LAST_ERR=/tmp/tmp.jdXrL5QpnY ++ local exit_status=0 ++ local timeout=4 ++ sed -e 's/.json$//' +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dtccqYOemh ++ cat /tmp/tmp.jdXrL5QpnY ++ rm /tmp/tmp.dtccqYOemh /tmp/tmp.jdXrL5QpnY ++ return 0 + backup_dest_minio=operator-testing/2024-07-31T15:03:04Z + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2024-07-31T15:03:04Z/rs0/ + grep myApp.test.gz ++ mktemp + local LAST_OUT=/tmp/tmp.57OyQNw4Cl ++ mktemp + local LAST_ERR=/tmp/tmp.0ce6ZBBjs6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2024-07-31T15:03:04Z/rs0/ + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.57OyQNw4Cl + cat /tmp/tmp.0ce6ZBBjs6 + rm /tmp/tmp.57OyQNw4Cl /tmp/tmp.0ce6ZBBjs6 + return 0 2024-07-31 15:03:08 56 myApp.test.gz + run_recovery_check backup-minio some-name-rs0 + local backup=backup-minio + local cluster=some-name-rs0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' 
myApp:myPass@some-name-rs0.demand-backup-29329 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.geZSC9jPSm +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZCZVWCS6M6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.geZSC9jPSm ++ cat /tmp/tmp.ZCZVWCS6M6 ++ rm /tmp/tmp.geZSC9jPSm /tmp/tmp.ZCZVWCS6M6 ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.m6w3WYApzn ++ mktemp + local LAST_ERR=/tmp/tmp.6PzUqN9Rlo + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.m6w3WYApzn Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("d76bed45-1169-4c09-9e49-b6814fe051de") } Percona Server for MongoDB server version: v7.0.12-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.6PzUqN9Rlo + rm /tmp/tmp.m6w3WYApzn /tmp/tmp.6PzUqN9Rlo + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-29329 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-29329 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iWaaVAMdxT +++ mktemp ++ local LAST_ERR=/tmp/tmp.sDwvBZEjPK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iWaaVAMdxT ++ cat /tmp/tmp.sDwvBZEjPK ++ rm /tmp/tmp.iWaaVAMdxT /tmp/tmp.sDwvBZEjPK ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.42zEUWaPDk ++ mktemp + local LAST_ERR=/tmp/tmp.xjUsKTrx4x + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.42zEUWaPDk + cat /tmp/tmp.xjUsKTrx4x + rm /tmp/tmp.42zEUWaPDk /tmp/tmp.xjUsKTrx4x + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.11cGdhxzv9/find-2nd + run_restore backup-minio + local backup_name=backup-minio + /usr/bin/sed -e 's/name:/name: restore-backup-minio/' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/restore.yml + /usr/bin/sed -e 's/backupName:/backupName: backup-minio/' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.C3kH5SID4M ++ mktemp + local LAST_ERR=/tmp/tmp.m1e5rZ1RS1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.C3kH5SID4M perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio created + cat /tmp/tmp.m1e5rZ1RS1 + rm /tmp/tmp.C3kH5SID4M /tmp/tmp.m1e5rZ1RS1 + return 0 + wait_restore backup-minio some-name + local backup_name=backup-minio + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=780 + set +o xtrace waiting psmdb-restore/backup-minio to reach ready state........... 
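-----------------------------------------------------------------------------------
sketch: wait_cluster_consistency polling (reconstructed from the trace above)
-----------------------------------------------------------------------------------
After every restore the test blocks until the psmdb custom resource reports .status.state == ready, probing it with kubectl get and a jsonpath output. The trace only ever shows the first probe succeeding, so the poll interval and the give-up behaviour below are assumptions; the initial sleep 7, the wait_time=32 retry budget, and the progress dots are taken from the log:

wait_cluster_consistency() {
    local cluster_name=$1
    local wait_time=${2:-32}
    local retry=0
    sleep 7                                  # let the operator pick up the restored state
    echo -n 'waiting for cluster readyness'  # spelling as it appears in this log
    until [[ "$(kubectl get psmdb "${cluster_name}" \
            -o 'jsonpath={.status.state}')" == "ready" ]]; do
        sleep 5                              # assumed poll interval
        echo -n .
        retry=$((retry + 1))
        if [ "$retry" -ge "$wait_time" ]; then
            echo "cluster ${cluster_name} did not reach ready state in time"
            return 1
        fi
    done
    echo
}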
+ '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OHInXVfWSV +++ mktemp ++ local LAST_ERR=/tmp/tmp.bjLiDDVRzv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.OHInXVfWSV ++ cat /tmp/tmp.bjLiDDVRzv ++ rm /tmp/tmp.OHInXVfWSV /tmp/tmp.bjLiDDVRzv ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OW5js980Gm +++ mktemp ++ local LAST_ERR=/tmp/tmp.dZwR1TXYf9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.OW5js980Gm ++ cat /tmp/tmp.dZwR1TXYf9 ++ rm /tmp/tmp.OW5js980Gm /tmp/tmp.dZwR1TXYf9 ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.oJUM2jp44h ++ mktemp + local LAST_ERR=/tmp/tmp.CIebKsof5A + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oJUM2jp44h + cat /tmp/tmp.CIebKsof5A + rm /tmp/tmp.oJUM2jp44h /tmp/tmp.CIebKsof5A + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local 
command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1As2oG3Lqm +++ mktemp ++ local LAST_ERR=/tmp/tmp.vYV8AvmiaI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1As2oG3Lqm ++ cat /tmp/tmp.vYV8AvmiaI ++ rm /tmp/tmp.1As2oG3Lqm /tmp/tmp.vYV8AvmiaI ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.ZaRkBg5wkg ++ mktemp + local LAST_ERR=/tmp/tmp.wgYPmaDRti + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ZaRkBg5wkg + cat /tmp/tmp.wgYPmaDRti + rm /tmp/tmp.ZaRkBg5wkg /tmp/tmp.wgYPmaDRti + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dbbhFLO8du +++ mktemp ++ local LAST_ERR=/tmp/tmp.VqSBlBVYSE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e 
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dbbhFLO8du ++ cat /tmp/tmp.VqSBlBVYSE ++ rm /tmp/tmp.dbbhFLO8du /tmp/tmp.VqSBlBVYSE ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.JGRC3f6eRK ++ mktemp + local LAST_ERR=/tmp/tmp.iB03yFUMNw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JGRC3f6eRK + cat /tmp/tmp.iB03yFUMNw + rm /tmp/tmp.JGRC3f6eRK /tmp/tmp.iB03yFUMNw + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + desc 'restore from backup source, with storageName -- minio' + set +o xtrace ----------------------------------------------------------------------------------- restore from backup source, with storageName -- minio ----------------------------------------------------------------------------------- ++ get_backup_dest backup-minio ++ local backup_name=backup-minio ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|azure://||' ++ sed 's|s3://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TG5LB7FGz7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AvN4q0CY5G ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TG5LB7FGz7 ++ cat /tmp/tmp.AvN4q0CY5G ++ rm /tmp/tmp.TG5LB7FGz7 /tmp/tmp.AvN4q0CY5G ++ return 0 + backup_dest_minio=operator-testing/2024-07-31T15:03:04Z + run_recovery_check_bkp_source backup-minio operator-testing/2024-07-31T15:03:04Z some-name-rs0 backup-minio-source-0 + local backup=backup-minio + local backup_dest=operator-testing/2024-07-31T15:03:04Z + local cluster=some-name-rs0 + local source=backup-minio-source-0 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-29329 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hGGshkeUGH +++ mktemp ++ local LAST_ERR=/tmp/tmp.TfMHT0bEeP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hGGshkeUGH ++ cat /tmp/tmp.TfMHT0bEeP ++ rm /tmp/tmp.hGGshkeUGH /tmp/tmp.TfMHT0bEeP ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local 
mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Ln0SVncUQg ++ mktemp + local LAST_ERR=/tmp/tmp.g14KxD07nw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Ln0SVncUQg Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("a860c496-9f31-4413-bff9-ecb65857fbab") } Percona Server for MongoDB server version: v7.0.12-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.g14KxD07nw + rm /tmp/tmp.Ln0SVncUQg /tmp/tmp.g14KxD07nw + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-29329 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-29329 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.o9olKBtWbo +++ mktemp ++ local LAST_ERR=/tmp/tmp.xLamM4meGD ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.o9olKBtWbo ++ cat /tmp/tmp.xLamM4meGD ++ rm /tmp/tmp.o9olKBtWbo /tmp/tmp.xLamM4meGD ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.oyF314QH85 ++ mktemp + local LAST_ERR=/tmp/tmp.e8GM3Yyg87 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf 
'\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.oyF314QH85 + cat /tmp/tmp.e8GM3Yyg87 + rm /tmp/tmp.oyF314QH85 /tmp/tmp.e8GM3Yyg87 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.11cGdhxzv9/find-2nd + run_restore_backupsource backup-minio-source-0 operator-testing/2024-07-31T15:03:04Z + local backupName=backup-minio-source-0 + local backupDest=operator-testing/2024-07-31T15:03:04Z + local storageName= + desc 'run restore restore-backup-minio-source-0 from backup backup-minio-source-0 destination is operator-testing/2024-07-31T15:03:04Z' + set +o xtrace ----------------------------------------------------------------------------------- run restore restore-backup-minio-source-0 from backup backup-minio-source-0 destination is operator-testing/2024-07-31T15:03:04Z ----------------------------------------------------------------------------------- + '[' -z '' ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/restore-backupsource.yml + /usr/bin/sed -e 's/name:/name: restore-backup-minio-source-0/' + kubectl_bin apply -f - ++ mktemp + /usr/bin/sed -e /storageName/d + /usr/bin/sed -e 's|BACKUP-NAME|operator-testing/2024-07-31T15:03:04Z|' + local LAST_OUT=/tmp/tmp.VGF610UFFi ++ mktemp + local LAST_ERR=/tmp/tmp.JhrBAq8oWv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VGF610UFFi perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-source-0 created + cat /tmp/tmp.JhrBAq8oWv + rm /tmp/tmp.VGF610UFFi /tmp/tmp.JhrBAq8oWv + return 0 + return + wait_restore backup-minio-source-0 some-name + local backup_name=backup-minio-source-0 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=780 + set +o xtrace waiting psmdb-restore/backup-minio-source-0 to reach ready state........... 
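-----------------------------------------------------------------------------------
[editor's note] how the backupSource restore manifest is assembled
-----------------------------------------------------------------------------------
The trace above shows run_restore_backupsource building the PerconaServerMongoDBRestore
object from a template: sed sets the object name, deletes the storageName line (because
no storage name was passed in this run), substitutes the BACKUP-NAME placeholder with
the backup destination, and pipes the result to kubectl apply. A minimal bash sketch of
that pipeline follows; it is reconstructed from the trace, not taken from the harness
source, and the test_dir variable and STORAGE-NAME placeholder are assumptions.

run_restore_backupsource() {
	local backupName=$1
	local backupDest=$2
	local storageName=$3   # empty in this run

	local name_filter="s/name:/name: restore-${backupName}/"
	local dest_filter="s|BACKUP-NAME|${backupDest}|"
	# with no storage name, the template's storageName line is simply deleted,
	# so the operator resolves the backup purely from spec.backupSource
	local storage_filter='/storageName/d'
	[ -n "$storageName" ] && storage_filter="s|STORAGE-NAME|${storageName}|"

	cat "${test_dir}/conf/restore-backupsource.yml" \
		| sed -e "$name_filter" \
		| sed -e "$dest_filter" \
		| sed -e "$storage_filter" \
		| kubectl apply -f -
}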
+ '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RBRKJnWfyy +++ mktemp ++ local LAST_ERR=/tmp/tmp.oUVwuHjNAm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RBRKJnWfyy ++ cat /tmp/tmp.oUVwuHjNAm ++ rm /tmp/tmp.RBRKJnWfyy /tmp/tmp.oUVwuHjNAm ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8UvSjDsZqY +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_ERR=/tmp/tmp.ptJNhKS3nc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8UvSjDsZqY ++ cat /tmp/tmp.ptJNhKS3nc ++ rm /tmp/tmp.8UvSjDsZqY /tmp/tmp.ptJNhKS3nc ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Etp4vf3EU0 ++ mktemp + local LAST_ERR=/tmp/tmp.Nntu09lc1K + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Etp4vf3EU0 + cat /tmp/tmp.Nntu09lc1K + rm /tmp/tmp.Etp4vf3EU0 /tmp/tmp.Nntu09lc1K + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local 
command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Wjn8gh8gvY +++ mktemp ++ local LAST_ERR=/tmp/tmp.aeL1AiS6dg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Wjn8gh8gvY ++ cat /tmp/tmp.aeL1AiS6dg ++ rm /tmp/tmp.Wjn8gh8gvY /tmp/tmp.aeL1AiS6dg ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Kn2RyQroCV ++ mktemp + local LAST_ERR=/tmp/tmp.WrE6N3vIyX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Kn2RyQroCV + cat /tmp/tmp.WrE6N3vIyX + rm /tmp/tmp.Kn2RyQroCV /tmp/tmp.WrE6N3vIyX + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6wtPQvNPuD +++ mktemp ++ local LAST_ERR=/tmp/tmp.BkuIM8g342 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e 
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6wtPQvNPuD ++ cat /tmp/tmp.BkuIM8g342 ++ rm /tmp/tmp.6wtPQvNPuD /tmp/tmp.BkuIM8g342 ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.nGQmIA06KH ++ mktemp + local LAST_ERR=/tmp/tmp.KBpWrdH85n + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nGQmIA06KH + cat /tmp/tmp.KBpWrdH85n + rm /tmp/tmp.nGQmIA06KH /tmp/tmp.KBpWrdH85n + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + desc 'restore from backup source, no storageName -- minio' + set +o xtrace ----------------------------------------------------------------------------------- restore from backup source, no storageName -- minio ----------------------------------------------------------------------------------- ++ get_backup_dest backup-minio ++ local backup_name=backup-minio ++ kubectl_bin get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ sed 's|azure://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2s3NE6tX1P +++ mktemp ++ local LAST_ERR=/tmp/tmp.8MOxUC0Vmw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup backup-minio -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2s3NE6tX1P ++ cat /tmp/tmp.8MOxUC0Vmw ++ rm /tmp/tmp.2s3NE6tX1P /tmp/tmp.8MOxUC0Vmw ++ return 0 + backup_dest_minio=operator-testing/2024-07-31T15:03:04Z + run_recovery_check_bkp_source backup-minio operator-testing/2024-07-31T15:03:04Z some-name-rs0 backup-minio-source-1 + local backup=backup-minio + local backup_dest=operator-testing/2024-07-31T15:03:04Z + local cluster=some-name-rs0 + local source=backup-minio-source-1 + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.demand-backup-29329 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RIHXdFTFnU +++ mktemp ++ local LAST_ERR=/tmp/tmp.HPZ6zdDyVX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RIHXdFTFnU ++ cat /tmp/tmp.HPZ6zdDyVX ++ rm /tmp/tmp.RIHXdFTFnU /tmp/tmp.HPZ6zdDyVX ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local 
mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.kRdjSLm0Tm ++ mktemp + local LAST_ERR=/tmp/tmp.a0vz6CFpSv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kRdjSLm0Tm Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("2714ffd0-196d-4bb0-90d3-460e18ca01f0") } Percona Server for MongoDB server version: v7.0.12-7 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.a0vz6CFpSv + rm /tmp/tmp.kRdjSLm0Tm /tmp/tmp.a0vz6CFpSv + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0.demand-backup-29329 -2nd .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local postfix=-2nd + local suffix=.svc.cluster.local + local database=myApp + local collection=test + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0.demand-backup-29329 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qz6jzSGyfM +++ mktemp ++ local LAST_ERR=/tmp/tmp.mB18WpbTgt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Qz6jzSGyfM ++ cat /tmp/tmp.mB18WpbTgt ++ rm /tmp/tmp.Qz6jzSGyfM /tmp/tmp.mB18WpbTgt ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.pMSkL6W70h ++ mktemp + local LAST_ERR=/tmp/tmp.2Cja1gh2vj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf 
'\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.pMSkL6W70h + cat /tmp/tmp.2Cja1gh2vj + rm /tmp/tmp.pMSkL6W70h /tmp/tmp.2Cja1gh2vj + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find-2nd.json /tmp/tmp.11cGdhxzv9/find-2nd + run_restore_backupsource backup-minio-source-1 operator-testing/2024-07-31T15:03:04Z + local backupName=backup-minio-source-1 + local backupDest=operator-testing/2024-07-31T15:03:04Z + local storageName= + desc 'run restore restore-backup-minio-source-1 from backup backup-minio-source-1 destination is operator-testing/2024-07-31T15:03:04Z' + set +o xtrace ----------------------------------------------------------------------------------- run restore restore-backup-minio-source-1 from backup backup-minio-source-1 destination is operator-testing/2024-07-31T15:03:04Z ----------------------------------------------------------------------------------- + '[' -z '' ']' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/restore-backupsource.yml + /usr/bin/sed -e 's|BACKUP-NAME|operator-testing/2024-07-31T15:03:04Z|' + kubectl_bin apply -f - + /usr/bin/sed -e /storageName/d + /usr/bin/sed -e 's/name:/name: restore-backup-minio-source-1/' ++ mktemp + local LAST_OUT=/tmp/tmp.le1hBpsNgr ++ mktemp + local LAST_ERR=/tmp/tmp.gX7J43K9GA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.le1hBpsNgr perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio-source-1 created + cat /tmp/tmp.gX7J43K9GA + rm /tmp/tmp.le1hBpsNgr /tmp/tmp.gX7J43K9GA + return 0 + return + wait_restore backup-minio-source-1 some-name + local backup_name=backup-minio-source-1 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=780 + set +o xtrace waiting psmdb-restore/backup-minio-source-1 to reach ready state........... 
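-----------------------------------------------------------------------------------
[editor's note] the polling behind wait_restore and wait_cluster_consistency
-----------------------------------------------------------------------------------
The dotted progress lines above come from two polling loops: wait_restore checks the
psmdb-restore object until it reaches the target state (the trace shows the defaults
target_state=ready, wait_cluster_consistency=1, wait_time=780), and because
wait_cluster_consistency is set, the harness then polls the psmdb object's
.status.state until it reports ready. The sketch below is a plausible reconstruction
from the trace; the restore object naming, the one-second poll interval, and the
timeout handling are assumptions.

wait_restore() {
	local backup_name=$1
	local cluster_name=$2
	local target_state=${3:-ready}
	local wait_cluster_consistency=${4:-1}
	local wait_time=${5:-780}
	local retry=0

	echo -n "waiting psmdb-restore/${backup_name} to reach ${target_state} state"
	until [[ $(kubectl get psmdb-restore "restore-${backup_name}" \
			-o jsonpath='{.status.state}' 2>/dev/null) == "$target_state" ]]; do
		echo -n '.'
		sleep 1
		(( retry++ >= wait_time )) && { echo "restore timed out"; exit 1; }
	done
	echo
}

wait_cluster_consistency() {
	local cluster_name=$1
	local wait_time=${2:-32}
	local retry=0

	echo -n 'waiting for cluster readyness'   # message spelled as in the harness output
	until [[ $(kubectl get psmdb "$cluster_name" \
			-o jsonpath='{.status.state}') == "ready" ]]; do
		sleep 7
		(( retry++ >= wait_time )) && { echo "cluster never became ready"; exit 1; }
	done
	echo
}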
+ '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k2vLC5rumM +++ mktemp ++ local LAST_ERR=/tmp/tmp.pJsftcBLPP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.k2vLC5rumM ++ cat /tmp/tmp.pJsftcBLPP ++ rm /tmp/tmp.k2vLC5rumM /tmp/tmp.pJsftcBLPP ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + simple_data_check some-name-rs0 3 0 + local cluster_name=some-name-rs0 + let last_pod=3-1 + local isSharded=0 + local cluster_pfx= + '[' 0 -eq 1 ']' ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.sLNYgIof9L +++ mktemp ++ local LAST_ERR=/tmp/tmp.1tyl24oJ98 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.sLNYgIof9L ++ cat /tmp/tmp.1tyl24oJ98 ++ rm /tmp/tmp.sLNYgIof9L /tmp/tmp.1tyl24oJ98 ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.GqG9KCNnIl ++ mktemp + local LAST_ERR=/tmp/tmp.OZwVj2dj95 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GqG9KCNnIl + cat /tmp/tmp.OZwVj2dj95 + rm /tmp/tmp.GqG9KCNnIl /tmp/tmp.OZwVj2dj95 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local 
command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PiA7n9Fdhv +++ mktemp ++ local LAST_ERR=/tmp/tmp.LniMgw6a61 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PiA7n9Fdhv ++ cat /tmp/tmp.LniMgw6a61 ++ rm /tmp/tmp.PiA7n9Fdhv /tmp/tmp.LniMgw6a61 ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.jYXUehciCF ++ mktemp + local LAST_ERR=/tmp/tmp.6nLdiYzmPr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jYXUehciCF + cat /tmp/tmp.6nLdiYzmPr + rm /tmp/tmp.jYXUehciCF /tmp/tmp.6nLdiYzmPr + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + for i in '$(seq 0 $last_pod)' + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WjewRHcfXW +++ mktemp ++ local LAST_ERR=/tmp/tmp.yawek4fkCi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e 
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WjewRHcfXW ++ cat /tmp/tmp.yawek4fkCi ++ rm /tmp/tmp.WjewRHcfXW /tmp/tmp.yawek4fkCi ++ return 0 + local client_container=psmdb-client-6c585f8dbd-x5n42 + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.WraA1TgUcj ++ mktemp + local LAST_ERR=/tmp/tmp.P1wXk5u9Gj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-x5n42 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.demand-backup-29329.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.WraA1TgUcj + cat /tmp/tmp.P1wXk5u9Gj + rm /tmp/tmp.WraA1TgUcj /tmp/tmp.P1wXk5u9Gj + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/compare/find.json /tmp/tmp.11cGdhxzv9/find + desc 'delete backup and check if it is removed from bucket -- minio' + set +o xtrace ----------------------------------------------------------------------------------- delete backup and check if it is removed from bucket -- minio ----------------------------------------------------------------------------------- + kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.GUDCIVAiUe ++ mktemp + local LAST_ERR=/tmp/tmp.Xq4KELc63k + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GUDCIVAiUe perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs" deleted perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted + cat /tmp/tmp.Xq4KELc63k + rm /tmp/tmp.GUDCIVAiUe /tmp/tmp.Xq4KELc63k + return 0 ++ kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ grep -c operator-testing/2024-07-31T15:03:04Z +++ mktemp ++ cat ++ local LAST_OUT=/tmp/tmp.rdyMsqLZJd +++ mktemp ++ local LAST_ERR=/tmp/tmp.WJE8fl8j2M ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rdyMsqLZJd ++ cat /tmp/tmp.WJE8fl8j2M ++ rm /tmp/tmp.rdyMsqLZJd /tmp/tmp.WJE8fl8j2M ++ return 0 + backup_exists=0 + [[ 0 -eq 1 ]] + '[' -z '' ']' + check_backup_deletion https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2024-07-31T15:01:39Z aws-s3 + 
path=https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2024-07-31T15:01:39Z + storage_name=aws-s3 + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2024-07-31T15:01:39Z + [[ 403 -eq 403 ]] + check_backup_deletion https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2024-07-31T15:02:11Z gcp-cs + path=https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2024-07-31T15:02:11Z + storage_name=gcp-cs + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2024-07-31T15:02:11Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2024-07-31T15:02:11Z + [[ 404 -eq 404 ]] + check_backup_deletion https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2024-07-31T15:03:35Z azure-blob + path=https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2024-07-31T15:03:35Z + storage_name=azure-blob + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2024-07-31T15:03:35Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2024-07-31T15:03:35Z + [[ 404 -eq 404 ]] + desc 'checking backup deletion without cr' + set +o xtrace ----------------------------------------------------------------------------------- checking backup deletion without cr ----------------------------------------------------------------------------------- + run_backup minio + local storage=minio + local backup_name=backup-minio + desc 'run backup backup-minio' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-minio ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-minio" | .spec.storageName = "minio"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/backup-minio.yml ++ mktemp + local LAST_OUT=/tmp/tmp.77w97ePbnt ++ mktemp + local LAST_ERR=/tmp/tmp.KzxL2yHkJR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.77w97ePbnt perconaservermongodbbackup.psmdb.percona.com/backup-minio created + cat /tmp/tmp.KzxL2yHkJR + rm /tmp/tmp.77w97ePbnt /tmp/tmp.KzxL2yHkJR + return 0 + '[' -z '' ']' + run_backup aws-s3 + local storage=aws-s3 + local backup_name=backup-aws-s3 + desc 'run backup backup-aws-s3' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-aws-s3 ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-aws-s3" | .spec.storageName = "aws-s3"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/backup-aws-s3.yml ++ mktemp + local LAST_OUT=/tmp/tmp.HqDiBCvSzU ++ mktemp + local LAST_ERR=/tmp/tmp.noyOY1JHil + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.HqDiBCvSzU perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created + cat /tmp/tmp.noyOY1JHil + rm 
/tmp/tmp.HqDiBCvSzU /tmp/tmp.noyOY1JHil + return 0 + run_backup gcp-cs + local storage=gcp-cs + local backup_name=backup-gcp-cs + desc 'run backup backup-gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-gcp-cs ----------------------------------------------------------------------------------- + yq eval '.metadata.name = "backup-gcp-cs" | .spec.storageName = "gcp-cs"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/backup-gcp-cs.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.Z6MfVhGS6S ++ mktemp + local LAST_ERR=/tmp/tmp.AvzVwyR2D3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Z6MfVhGS6S perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs created + cat /tmp/tmp.AvzVwyR2D3 + rm /tmp/tmp.Z6MfVhGS6S /tmp/tmp.AvzVwyR2D3 + return 0 + run_backup azure-blob + local storage=azure-blob + local backup_name=backup-azure-blob + desc 'run backup backup-azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- run backup backup-azure-blob ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval '.metadata.name = "backup-azure-blob" | .spec.storageName = "azure-blob"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/e2e-tests/demand-backup/conf/backup-azure-blob.yml ++ mktemp + local LAST_OUT=/tmp/tmp.LAC8Y50QUe ++ mktemp + local LAST_ERR=/tmp/tmp.5yjxqcqR9B + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LAC8Y50QUe perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created + cat /tmp/tmp.5yjxqcqR9B + rm /tmp/tmp.LAC8Y50QUe /tmp/tmp.5yjxqcqR9B + return 0 + wait_backup backup-minio + local backup_name=backup-minio + set +o xtrace backup-minio............. + '[' -z '' ']' + wait_backup backup-aws-s3 + local backup_name=backup-aws-s3 + set +o xtrace backup-aws-s3............... + wait_backup backup-gcp-cs + local backup_name=backup-gcp-cs + set +o xtrace backup-gcp-cs................................. + wait_backup backup-azure-blob + local backup_name=backup-azure-blob + set +o xtrace backup-azure-blob. 
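-----------------------------------------------------------------------------------
[editor's note] how the four backups are created and awaited
-----------------------------------------------------------------------------------
For each storage (minio, aws-s3, gcp-cs, azure-blob) the trace shows the same pattern:
run_backup patches the per-storage template with yq, setting .metadata.name and
.spec.storageName, and applies it; wait_backup then prints one dot per poll until the
backup object is done. A short sketch under those assumptions follows; the ready-state
jsonpath, the poll interval, and the test_dir variable are assumptions, while the
template path convention conf/backup-<storage>.yml is taken from the trace.

run_backup() {
	local storage=$1
	local backup_name="backup-${storage}"
	# patch the template in a stream instead of editing files on disk
	yq eval ".metadata.name = \"${backup_name}\" | .spec.storageName = \"${storage}\"" \
		"${test_dir}/conf/backup-${storage}.yml" \
		| kubectl apply -f -
}

wait_backup() {
	local backup_name=$1
	echo -n "$backup_name"
	# one dot per poll, matching the dotted progress lines in the log above
	until [[ $(kubectl get psmdb-backup "$backup_name" \
			-o jsonpath='{.status.state}' 2>/dev/null) == "ready" ]]; do
		echo -n '.'
		sleep 1
	done
	echo
}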
+ kubectl_bin delete psmdb --all ++ mktemp + local LAST_OUT=/tmp/tmp.I5aaiVEaht ++ mktemp + local LAST_ERR=/tmp/tmp.x4JjVGLqtS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.I5aaiVEaht perconaservermongodb.psmdb.percona.com "some-name" deleted + cat /tmp/tmp.x4JjVGLqtS + rm /tmp/tmp.I5aaiVEaht /tmp/tmp.x4JjVGLqtS + return 0 + sleep 60 + desc 'delete backup and check if it is removed from bucket -- minio' + set +o xtrace ----------------------------------------------------------------------------------- delete backup and check if it is removed from bucket -- minio ----------------------------------------------------------------------------------- + kubectl_bin delete psmdb-backup --all ++ mktemp + local LAST_OUT=/tmp/tmp.FKULvoS1k0 ++ mktemp + local LAST_ERR=/tmp/tmp.KiB6Eq17Na + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb-backup --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FKULvoS1k0 perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs" deleted perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted + cat /tmp/tmp.KiB6Eq17Na + rm /tmp/tmp.FKULvoS1k0 /tmp/tmp.KiB6Eq17Na + return 0 ++ kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ grep -c operator-testing/2024-07-31T15:03:04Z ++ cat +++ mktemp ++ local LAST_OUT=/tmp/tmp.O6zpuUv5Ua +++ mktemp ++ local LAST_ERR=/tmp/tmp.xm6qHVEBlw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.O6zpuUv5Ua ++ cat /tmp/tmp.xm6qHVEBlw If you don't see a command prompt, try pressing enter. 
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-29329 ++ rm /tmp/tmp.O6zpuUv5Ua /tmp/tmp.xm6qHVEBlw ++ return 0 + backup_exists=0 + [[ 0 -eq 1 ]] + '[' -z '' ']' + check_backup_deletion https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2024-07-31T15:01:39Z aws-s3 + path=https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2024-07-31T15:01:39Z + storage_name=aws-s3 + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://s3.amazonaws.com/operator-testing/psmdb-demand-backup/2024-07-31T15:01:39Z + [[ 403 -eq 403 ]] + check_backup_deletion https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2024-07-31T15:02:11Z gcp-cs + path=https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2024-07-31T15:02:11Z + storage_name=gcp-cs + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2024-07-31T15:02:11Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://storage.googleapis.com/operator-testing/psmdb-demand-backup/2024-07-31T15:02:11Z + [[ 404 -eq 404 ]] + check_backup_deletion https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2024-07-31T15:03:35Z azure-blob + path=https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2024-07-31T15:03:35Z + storage_name=azure-blob + retry=0 ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2024-07-31T15:03:35Z + [[ 404 -eq 403 ]] ++ curl -sw '%{http_code}' -o /dev/null https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-demand-backup/2024-07-31T15:03:35Z + [[ 404 -eq 404 ]] + desc 'check for passwords leak' + set +o xtrace ----------------------------------------------------------------------------------- check for passwords leak ----------------------------------------------------------------------------------- + check_passwords_leak + local secrets + local passwords + local pods ++ kubectl_bin get secrets -o json ++ jq -r '.items[].data | to_entries | .[] | select(.key | (contains("_PASSWORD"))) | .value' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pkw2wG822I +++ mktemp ++ local LAST_ERR=/tmp/tmp.OZv0ysDgep ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pkw2wG822I ++ cat /tmp/tmp.OZv0ysDgep ++ rm /tmp/tmp.pkw2wG822I /tmp/tmp.OZv0ysDgep ++ return 0 + secrets='YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2' + echo secrets=YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 secrets=YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in 
'$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo ++ for i in '$secrets' ++ base64 -d ++ echo + passwords='backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2' + echo passwords=backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 passwords=backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 ++ kubectl_bin get pods -o name ++ awk -F / '{print $2}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RzwV1iI2EC +++ mktemp ++ local LAST_ERR=/tmp/tmp.YRW91EYv5h ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods -o name ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.RzwV1iI2EC ++ cat /tmp/tmp.YRW91EYv5h ++ rm /tmp/tmp.RzwV1iI2EC /tmp/tmp.YRW91EYv5h ++ return 0 + pods='minio-service-6ff7647778-4ck4c psmdb-client-6c585f8dbd-x5n42' + echo pods=minio-service-6ff7647778-4ck4c psmdb-client-6c585f8dbd-x5n42 pods=minio-service-6ff7647778-4ck4c psmdb-client-6c585f8dbd-x5n42 + collect_logs demand-backup-29329 + local containers + local count + NS=demand-backup-29329 + for p in '$pods' ++ kubectl_bin -n demand-backup-29329 get pod minio-service-6ff7647778-4ck4c -o 'jsonpath={.spec.containers[*].name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4gbhMfyMp4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.w7VUZVQOAI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl -n demand-backup-29329 get pod minio-service-6ff7647778-4ck4c -o 'jsonpath={.spec.containers[*].name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4gbhMfyMp4 ++ cat /tmp/tmp.w7VUZVQOAI ++ rm /tmp/tmp.4gbhMfyMp4 /tmp/tmp.w7VUZVQOAI ++ return 0 + containers=minio + for c in '$containers' + [[ minio =~ pmm ]] + kubectl_bin -n demand-backup-29329 logs minio-service-6ff7647778-4ck4c -c minio ++ mktemp + local LAST_OUT=/tmp/tmp.J2Gr5n3M7W ++ mktemp + local LAST_ERR=/tmp/tmp.NVfelQM0qc + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl -n demand-backup-29329 logs minio-service-6ff7647778-4ck4c -c minio + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.J2Gr5n3M7W + cat /tmp/tmp.NVfelQM0qc + rm /tmp/tmp.J2Gr5n3M7W /tmp/tmp.NVfelQM0qc + return 0 + echo logs saved in: 
/tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt logs saved in: /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456 /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- backup123456 /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2 /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2 /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.11cGdhxzv9/logs_output-minio-service-6ff7647778-4ck4c-minio.txt ++ : + count=0 + [[ 0 != 0 ]] + for pass in '$passwords' ++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== 
+ for p in '$pods'
++ kubectl_bin -n demand-backup-29329 get pod psmdb-client-6c585f8dbd-x5n42 -o 'jsonpath={.spec.containers[*].name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.wpEzVdJpSY
+++ mktemp
++ local LAST_ERR=/tmp/tmp.4i6uDvAwNf
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl -n demand-backup-29329 get pod psmdb-client-6c585f8dbd-x5n42 -o 'jsonpath={.spec.containers[*].name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.wpEzVdJpSY
++ cat /tmp/tmp.4i6uDvAwNf
++ rm /tmp/tmp.wpEzVdJpSY /tmp/tmp.4i6uDvAwNf
++ return 0
+ containers=psmdb-client
+ for c in '$containers'
+ [[ psmdb-client =~ pmm ]]
+ kubectl_bin -n demand-backup-29329 logs psmdb-client-6c585f8dbd-x5n42 -c psmdb-client
++ mktemp
+ local LAST_OUT=/tmp/tmp.SksrHRp4Ly
++ mktemp
+ local LAST_ERR=/tmp/tmp.VB790C8b2R
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl -n demand-backup-29329 logs psmdb-client-6c585f8dbd-x5n42 -c psmdb-client
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.SksrHRp4Ly
+ cat /tmp/tmp.VB790C8b2R
+ rm /tmp/tmp.SksrHRp4Ly /tmp/tmp.VB790C8b2R
+ return 0
+ echo logs saved in: /tmp/tmp.11cGdhxzv9/logs_output-psmdb-client-6c585f8dbd-x5n42-psmdb-client.txt
logs saved in: /tmp/tmp.11cGdhxzv9/logs_output-psmdb-client-6c585f8dbd-x5n42-psmdb-client.txt
+ for pass in '$passwords'
++ grep -c --fixed-strings -- backup123456 /tmp/tmp.11cGdhxzv9/logs_output-psmdb-client-6c585f8dbd-x5n42-psmdb-client.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.11cGdhxzv9/logs_output-psmdb-client-6c585f8dbd-x5n42-psmdb-client.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.11cGdhxzv9/logs_output-psmdb-client-6c585f8dbd-x5n42-psmdb-client.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.11cGdhxzv9/logs_output-psmdb-client-6c585f8dbd-x5n42-psmdb-client.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.11cGdhxzv9/logs_output-psmdb-client-6c585f8dbd-x5n42-psmdb-client.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2 /tmp/tmp.11cGdhxzv9/logs_output-psmdb-client-6c585f8dbd-x5n42-psmdb-client.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.11cGdhxzv9/logs_output-psmdb-client-6c585f8dbd-x5n42-psmdb-client.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.11cGdhxzv9/logs_output-psmdb-client-6c585f8dbd-x5n42-psmdb-client.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.11cGdhxzv9/logs_output-psmdb-client-6c585f8dbd-x5n42-psmdb-client.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.11cGdhxzv9/logs_output-psmdb-client-6c585f8dbd-x5n42-psmdb-client.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ echo
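Each saved log file is scanned for all five test credentials (backup123456, clusterAdmin123456, clusterMonitor123456, databaseAdmin123456, userAdmin123456), both as plaintext and base64-encoded. A minimal sketch of that check, assuming a helper of this shape; the helper name is hypothetical, while the grep invocation is the one shown in the trace:

    check_file_for_leaks() {
        local file=$1
        local plain='backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456'
        local pass needle count
        for pass in $plain; do
            # check the plaintext value and its base64 form (as it appears in Secret data)
            for needle in "$pass" "$(echo -n "$pass" | base64)"; do
                count=$(grep -c --fixed-strings -- "$needle" "$file" || :)
                if [[ $count != 0 ]]; then
                    echo "secret leaked: '$needle' found $count time(s) in $file"
                    return 1
                fi
            done
        done
        return 0
    }

The `|| :` mirrors the `++ :` lines in the trace: `grep -c` exits non-zero when the count is 0, and the no-op keeps the check from tripping `set -e`.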
+ '[' -n psmdb-operator ']'
++ kubectl_bin -n psmdb-operator get pods -o name
++ awk -F / '{print $2}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.uigQPUckqZ
+++ mktemp
++ local LAST_ERR=/tmp/tmp.9cMlmGs4Ch
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl -n psmdb-operator get pods -o name
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.uigQPUckqZ
++ cat /tmp/tmp.9cMlmGs4Ch
++ rm /tmp/tmp.uigQPUckqZ /tmp/tmp.9cMlmGs4Ch
++ return 0
+ pods=percona-server-mongodb-operator-bdc5b774b-nl5sk
+ collect_logs psmdb-operator
+ local containers
+ local count
+ NS=psmdb-operator
+ for p in '$pods'
++ kubectl_bin -n psmdb-operator get pod percona-server-mongodb-operator-bdc5b774b-nl5sk -o 'jsonpath={.spec.containers[*].name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.qQ8uJ8kfRU
+++ mktemp
++ local LAST_ERR=/tmp/tmp.RK6Xc8Mzua
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl -n psmdb-operator get pod percona-server-mongodb-operator-bdc5b774b-nl5sk -o 'jsonpath={.spec.containers[*].name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 1 ']'
++ break
++ cat /tmp/tmp.qQ8uJ8kfRU
++ cat /tmp/tmp.RK6Xc8Mzua
++ rm /tmp/tmp.qQ8uJ8kfRU /tmp/tmp.RK6Xc8Mzua
++ return 0
+ containers=percona-server-mongodb-operator
+ for c in '$containers'
+ [[ percona-server-mongodb-operator =~ pmm ]]
+ kubectl_bin -n psmdb-operator logs percona-server-mongodb-operator-bdc5b774b-nl5sk -c percona-server-mongodb-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.ZaOfmsapQO
++ mktemp
+ local LAST_ERR=/tmp/tmp.xd3BbHlVAb
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl -n psmdb-operator logs percona-server-mongodb-operator-bdc5b774b-nl5sk -c percona-server-mongodb-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.ZaOfmsapQO
+ cat /tmp/tmp.xd3BbHlVAb
+ rm /tmp/tmp.ZaOfmsapQO /tmp/tmp.xd3BbHlVAb
+ return 0
+ echo logs saved in: /tmp/tmp.11cGdhxzv9/logs_output-percona-server-mongodb-operator-bdc5b774b-nl5sk-percona-server-mongodb-operator.txt
logs saved in: /tmp/tmp.11cGdhxzv9/logs_output-percona-server-mongodb-operator-bdc5b774b-nl5sk-percona-server-mongodb-operator.txt
+ for pass in '$passwords'
++ grep -c --fixed-strings -- backup123456 /tmp/tmp.11cGdhxzv9/logs_output-percona-server-mongodb-operator-bdc5b774b-nl5sk-percona-server-mongodb-operator.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- clusterAdmin123456 /tmp/tmp.11cGdhxzv9/logs_output-percona-server-mongodb-operator-bdc5b774b-nl5sk-percona-server-mongodb-operator.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- clusterMonitor123456 /tmp/tmp.11cGdhxzv9/logs_output-percona-server-mongodb-operator-bdc5b774b-nl5sk-percona-server-mongodb-operator.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- databaseAdmin123456 /tmp/tmp.11cGdhxzv9/logs_output-percona-server-mongodb-operator-bdc5b774b-nl5sk-percona-server-mongodb-operator.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- userAdmin123456 /tmp/tmp.11cGdhxzv9/logs_output-percona-server-mongodb-operator-bdc5b774b-nl5sk-percona-server-mongodb-operator.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- YmFja3VwMTIzNDU2 /tmp/tmp.11cGdhxzv9/logs_output-percona-server-mongodb-operator-bdc5b774b-nl5sk-percona-server-mongodb-operator.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- Y2x1c3RlckFkbWluMTIzNDU2 /tmp/tmp.11cGdhxzv9/logs_output-percona-server-mongodb-operator-bdc5b774b-nl5sk-percona-server-mongodb-operator.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- Y2x1c3Rlck1vbml0b3IxMjM0NTY= /tmp/tmp.11cGdhxzv9/logs_output-percona-server-mongodb-operator-bdc5b774b-nl5sk-percona-server-mongodb-operator.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== /tmp/tmp.11cGdhxzv9/logs_output-percona-server-mongodb-operator-bdc5b774b-nl5sk-percona-server-mongodb-operator.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ for pass in '$passwords'
++ grep -c --fixed-strings -- dXNlckFkbWluMTIzNDU2 /tmp/tmp.11cGdhxzv9/logs_output-percona-server-mongodb-operator-bdc5b774b-nl5sk-percona-server-mongodb-operator.txt
++ :
+ count=0
+ [[ 0 != 0 ]]
+ echo
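Every kubectl call in this log goes through the kubectl_bin wrapper, whose behaviour can be read off the trace: stdout and stderr are captured into mktemp files, up to three attempts are made (seq 0 2), and failed attempts back off for 0, 4 and 8 seconds, consistent with sleep $((timeout * i)) and timeout=4. A sketch reconstructed from the trace; the control flow is inferred, not copied from the source, so details such as the exact success test are assumptions:

    kubectl_bin() {
        local LAST_OUT LAST_ERR
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        local exit_status=0
        local timeout=4
        local i
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" 1>"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" -eq 0 ]; then
                break
            fi
            # show what the failed attempt produced, then back off: 0s, 4s, 8s
            cat "$LAST_OUT"
            cat "$LAST_ERR"
            sleep $((timeout * i))
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }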
+ destroy demand-backup-29329
+ local namespace=demand-backup-29329
+ local ignore_logs=true
+ desc 'destroy cluster/operator and all other resources'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
+ '[' true == false ']'
+ delete_crd
+ desc 'get and delete old CRDs and RBAC'
+ set +o xtrace
-----------------------------------------------------------------------------------
get and delete old CRDs and RBAC
-----------------------------------------------------------------------------------
+ kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/deploy/crd.yaml --ignore-not-found --wait=false
++ mktemp
+ local LAST_OUT=/tmp/tmp.c9Z3Z9ejsV
++ mktemp
+ local LAST_ERR=/tmp/tmp.Cq4ibKHYP6
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/deploy/crd.yaml --ignore-not-found --wait=false
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.c9Z3Z9ejsV
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted
+ cat /tmp/tmp.Cq4ibKHYP6
+ rm /tmp/tmp.c9Z3Z9ejsV /tmp/tmp.Cq4ibKHYP6
+ return 0
++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/deploy/crd.yaml
++ grep -v '\-\-\-'
+ for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')'
+ kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
E0731 15:14:33.569193 9639 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1-11-0: the server could not find the requested resource
E0731 15:14:33.569814 9639 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1: the server could not find the requested resource
E0731 15:14:33.570153 9639 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1-12-0: the server could not find the requested resource
E0731 15:14:33.570895 9639 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1-10-0: the server could not find the requested resource
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.JPOdnrrSA1
++ mktemp
+ local LAST_ERR=/tmp/tmp.tT5N3ay0B6
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.JPOdnrrSA1
+ cat /tmp/tmp.tT5N3ay0B6
+ rm /tmp/tmp.JPOdnrrSA1 /tmp/tmp.tT5N3ay0B6
+ return 0
+ for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')'
+ kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ grep -v NAMESPACE
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.50rox6AwAh
++ mktemp
+ local LAST_ERR=/tmp/tmp.ZuFoR3exFr
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.50rox6AwAh
+ cat /tmp/tmp.ZuFoR3exFr
+ rm /tmp/tmp.50rox6AwAh /tmp/tmp.ZuFoR3exFr
+ return 0
+ for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')'
+ kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
error: the server doesn't have a resource type "perconaservermongodbs"
+ kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbs"
+ :
+ kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com
++ mktemp
+ local LAST_OUT=/tmp/tmp.T848tVfgYb
++ mktemp
+ local LAST_ERR=/tmp/tmp.zoy8YwOeMP
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.T848tVfgYb
+ cat /tmp/tmp.zoy8YwOeMP
+ rm /tmp/tmp.T848tVfgYb /tmp/tmp.zoy8YwOeMP
+ return 0
+ local rbac_yaml=rbac.yaml
+ '[' -n psmdb-operator ']'
+ rbac_yaml=cw-rbac.yaml
+ kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/deploy/cw-rbac.yaml --ignore-not-found
++ mktemp
+ local LAST_OUT=/tmp/tmp.LCVilp1h9k
++ mktemp
+ local LAST_ERR=/tmp/tmp.iIYEjqJnv9
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1612/deploy/cw-rbac.yaml --ignore-not-found
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 1 ']'
+ break
+ cat /tmp/tmp.LCVilp1h9k
clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted
clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted
+ cat /tmp/tmp.iIYEjqJnv9
+ rm /tmp/tmp.LCVilp1h9k /tmp/tmp.iIYEjqJnv9
+ return 0
+ destroy_cert_manager
+ kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.SWTjw4hLw9
++ mktemp
+ local LAST_ERR=/tmp/tmp.88UFXtghDz
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 -a -n 1 ']'
+ cat /tmp/tmp.SWTjw4hLw9
+ cat /tmp/tmp.88UFXtghDz
serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": services 
"cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.SWTjw4hLw9 + cat /tmp/tmp.88UFXtghDz Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io 
"cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 4 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.SWTjw4hLw9 + cat /tmp/tmp.88UFXtghDz Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": 
clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server 
(NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 8 + cat /tmp/tmp.SWTjw4hLw9 + cat /tmp/tmp.88UFXtghDz Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": 
clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": deployments.apps 
"cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + rm /tmp/tmp.SWTjw4hLw9 /tmp/tmp.88UFXtghDz + return 1 + true + '[' -n '' ']' + '[' -n psmdb-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace demand-backup-29329 + rm -rf /tmp/tmp.11cGdhxzv9 + kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.3EJDBTt8ug + local LAST_OUT=/tmp/tmp.2ytBtgpDF5 + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.UHi0KcSTuo + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.I3lrjnHXIi + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace demand-backup-29329 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace psmdb-operator