Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/logs/scheduled-backup.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra scheduled-backup-2969 + local ns=scheduled-backup-2969 + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.UVNhdn08zK ++ mktemp + local LAST_ERR=/tmp/tmp.hA9q0jQEOl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UVNhdn08zK customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.hA9q0jQEOl + rm /tmp/tmp.UVNhdn08zK /tmp/tmp.hA9q0jQEOl + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.HpHd0L3iKx ++ mktemp + local LAST_ERR=/tmp/tmp.3ou7k5olRj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.HpHd0L3iKx + cat /tmp/tmp.3ou7k5olRj + rm /tmp/tmp.HpHd0L3iKx /tmp/tmp.3ou7k5olRj + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type 
"perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.NgmnGDDSV0 ++ mktemp + local LAST_ERR=/tmp/tmp.7P1iKLjuLe + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.NgmnGDDSV0 + cat /tmp/tmp.7P1iKLjuLe + rm /tmp/tmp.NgmnGDDSV0 /tmp/tmp.7P1iKLjuLe + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.XCvAyLZR3W ++ mktemp + local LAST_ERR=/tmp/tmp.V7YaPGoVzS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XCvAyLZR3W + cat /tmp/tmp.V7YaPGoVzS + rm /tmp/tmp.XCvAyLZR3W /tmp/tmp.V7YaPGoVzS + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.Dh3gj5ATp9 ++ mktemp + local LAST_ERR=/tmp/tmp.cR6dSnkrSu + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Dh3gj5ATp9 clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.cR6dSnkrSu + rm /tmp/tmp.Dh3gj5ATp9 /tmp/tmp.cR6dSnkrSu + return 0 + check_crd_for_deletion PR-1567-b27e0b5e + local git_tag=PR-1567-b27e0b5e ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1567-b27e0b5e/deploy/crd.yaml ++ yq eval .metadata.name ++ /usr/bin/sed ':a;N;$!ba;s/\n/ /g' ++ /usr/bin/sed s/---//g + for crd_name in '$(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '\''.metadata.name'\'' | $sed '\''s/---//g'\'' | $sed '\'':a;N;$!ba;s/\n/ /g'\'')' ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eb8nUbmSCh +++ mktemp ++ local LAST_ERR=/tmp/tmp.ohctmcPRHU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.eb8nUbmSCh ++ cat /tmp/tmp.ohctmcPRHU Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in '$(seq 0 2)' ++ set +e 
++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.eb8nUbmSCh ++ cat /tmp/tmp.ohctmcPRHU Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.eb8nUbmSCh ++ cat /tmp/tmp.ohctmcPRHU Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.eb8nUbmSCh ++ cat /tmp/tmp.ohctmcPRHU Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.eb8nUbmSCh /tmp/tmp.ohctmcPRHU ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found ++ mktemp ++ mktemp + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + local LAST_OUT=/tmp/tmp.jyvtVyZRB4 + local LAST_OUT=/tmp/tmp.v058L7S48k ++ mktemp + local LAST_ERR=/tmp/tmp.Sp4MKnMOAx + local exit_status=0 + local timeout=4 ++ mktemp ++ seq 0 2 + 
local LAST_ERR=/tmp/tmp.4LoWsBsHID + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.v058L7S48k + cat /tmp/tmp.4LoWsBsHID + rm /tmp/tmp.v058L7S48k /tmp/tmp.4LoWsBsHID + return 0 namespace "scheduled-backup-4343" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jyvtVyZRB4 namespace "psmdb-operator" deleted + cat /tmp/tmp.Sp4MKnMOAx + rm /tmp/tmp.jyvtVyZRB4 /tmp/tmp.Sp4MKnMOAx + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.3KWvRRfZRF ++ mktemp + local LAST_ERR=/tmp/tmp.HQEQaAcolG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3KWvRRfZRF + cat /tmp/tmp.HQEQaAcolG + rm /tmp/tmp.3KWvRRfZRF /tmp/tmp.HQEQaAcolG + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.5Q5SYnnO9D ++ mktemp + local LAST_ERR=/tmp/tmp.uC9K1wzUY9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5Q5SYnnO9D namespace/psmdb-operator created + cat /tmp/tmp.uC9K1wzUY9 + rm /tmp/tmp.5Q5SYnnO9D /tmp/tmp.uC9K1wzUY9 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.ni6zi4XSN2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.cO4diTcaXB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ni6zi4XSN2 ++ cat /tmp/tmp.cO4diTcaXB ++ rm /tmp/tmp.ni6zi4XSN2 /tmp/tmp.cO4diTcaXB ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1567-b27e0b5e-6-cluster3 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.KHWI6lNwdS ++ mktemp + local LAST_ERR=/tmp/tmp.RywyVJ23SQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1567-b27e0b5e-6-cluster3 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KHWI6lNwdS Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1567-b27e0b5e-6-cluster3" modified. 
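
Every kubectl call in this trace goes through the same wrapper: stdout and stderr are captured into mktemp files, the command is retried up to three times with growing pauses (the sleep 0 / sleep 4 / sleep 8 visible in the crd/null lookups above), and the captured output is printed before the temp files are removed and the last exit status is returned. A minimal sketch of that wrapper, reconstructed from the trace under the kubectl_bin name it uses and with the retry condition simplified:

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 timeout=4
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" -ne 0 ]; then
                cat "$LAST_OUT"
                cat "$LAST_ERR"
                sleep $((timeout * i))    # 0s, then 4s, then 8s between attempts
            else
                break
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm -f "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }

This is why every delete/apply/wait in the log is bracketed by the same mktemp, cat and rm lines: the wrapper trades verbosity for resilience against transient API-server errors.
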
+ cat /tmp/tmp.RywyVJ23SQ + rm /tmp/tmp.KHWI6lNwdS /tmp/tmp.RywyVJ23SQ + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.CPKEoi6L4R ++ mktemp + local LAST_ERR=/tmp/tmp.6n2NdK54AW + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CPKEoi6L4R customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.6n2NdK54AW + rm /tmp/tmp.CPKEoi6L4R /tmp/tmp.6n2NdK54AW + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: psmdb-operator^' + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.8Afutb4nRq ++ mktemp + local LAST_ERR=/tmp/tmp.VH7xdKDkFX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8Afutb4nRq clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.VH7xdKDkFX + rm /tmp/tmp.8Afutb4nRq /tmp/tmp.VH7xdKDkFX + return 0 + kubectl_bin apply -f - + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1567-b27e0b5e") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/cw-operator.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.PNvQzM97Yq ++ mktemp + local LAST_ERR=/tmp/tmp.fLYlys3TQu + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PNvQzM97Yq deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.fLYlys3TQu + rm /tmp/tmp.PNvQzM97Yq /tmp/tmp.fLYlys3TQu + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.4D7VjrqN0U +++ mktemp ++ local LAST_ERR=/tmp/tmp.qQAEaS3FXJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4D7VjrqN0U ++ cat /tmp/tmp.qQAEaS3FXJ ++ rm /tmp/tmp.4D7VjrqN0U /tmp/tmp.qQAEaS3FXJ ++ return 0 + wait_pod percona-server-mongodb-operator-6cfcdf54dd-ml28z + local pod=percona-server-mongodb-operator-6cfcdf54dd-ml28z + set +o xtrace waiting for pod/percona-server-mongodb-operator-6cfcdf54dd-ml28z to be ready.OK + create_namespace scheduled-backup-2969 + local namespace=scheduled-backup-2969 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + '[' -n '' ']' + desc 'cleaned up old 
namespaces scheduled-backup-2969' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces scheduled-backup-2969 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace scheduled-backup-2969 --ignore-not-found + xargs kubectl delete ns ++ mktemp + awk '{print$1}' + local LAST_OUT=/tmp/tmp.hPjI3Jz0FV ++ mktemp + local LAST_ERR=/tmp/tmp.Uy9UIKlZri + local exit_status=0 + local timeout=4 ++ mktemp ++ seq 0 2 + local LAST_OUT=/tmp/tmp.z4e8FSgw8F ++ mktemp + for i in '$(seq 0 2)' + set +e + kubectl delete namespace scheduled-backup-2969 --ignore-not-found + local LAST_ERR=/tmp/tmp.VqApFmHj5F + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.z4e8FSgw8F + cat /tmp/tmp.VqApFmHj5F + rm /tmp/tmp.z4e8FSgw8F /tmp/tmp.VqApFmHj5F + return 0 error: resource(s) were provided, but no name was specified + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hPjI3Jz0FV + cat /tmp/tmp.Uy9UIKlZri + rm /tmp/tmp.hPjI3Jz0FV /tmp/tmp.Uy9UIKlZri + return 0 + kubectl_bin wait --for=delete namespace scheduled-backup-2969 ++ mktemp + local LAST_OUT=/tmp/tmp.U8ykGJB3WP ++ mktemp + local LAST_ERR=/tmp/tmp.NN4l9tneUf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace scheduled-backup-2969 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.U8ykGJB3WP + cat /tmp/tmp.NN4l9tneUf + rm /tmp/tmp.U8ykGJB3WP /tmp/tmp.NN4l9tneUf + return 0 + desc 'create namespace scheduled-backup-2969' + set +o xtrace ----------------------------------------------------------------------------------- create namespace scheduled-backup-2969 ----------------------------------------------------------------------------------- + kubectl_bin create namespace scheduled-backup-2969 ++ mktemp + local LAST_OUT=/tmp/tmp.VR6UNoyMAR ++ mktemp + local LAST_ERR=/tmp/tmp.Ueq5RHIbHi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace scheduled-backup-2969 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VR6UNoyMAR namespace/scheduled-backup-2969 created + cat /tmp/tmp.Ueq5RHIbHi + rm /tmp/tmp.VR6UNoyMAR /tmp/tmp.Ueq5RHIbHi + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.9314hgQFSK +++ mktemp ++ local LAST_ERR=/tmp/tmp.LVBtqFYbaB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9314hgQFSK ++ cat /tmp/tmp.LVBtqFYbaB ++ rm /tmp/tmp.9314hgQFSK /tmp/tmp.LVBtqFYbaB ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1567-b27e0b5e-6-cluster3 --namespace=scheduled-backup-2969 ++ mktemp + local LAST_OUT=/tmp/tmp.xWe1syxjmS ++ mktemp + local LAST_ERR=/tmp/tmp.ykgZ2HmhM4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1567-b27e0b5e-6-cluster3 --namespace=scheduled-backup-2969 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xWe1syxjmS Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1567-b27e0b5e-6-cluster3" modified. 
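
The repeated "error: resource(s) were provided, but no name was specified" messages in the two destroy_chaos_mesh passes above are expected noise rather than failures: each sweep collects chaos-mesh object names with a kubectl get | grep | awk pipeline, hands the (possibly empty) list to kubectl delete under a 30-second timeout, and discards the failure with "|| :" (traced as "+ :"). A sketch of that best-effort shape, using the same resource kinds as the trace:

    # Best-effort cleanup; an empty grep result makes kubectl delete
    # complain about a missing name, so each failure is swallowed with || :
    timeout 30 kubectl delete MutatingWebhookConfiguration \
        $(kubectl get MutatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') || :
    timeout 30 kubectl delete ValidatingWebhookConfiguration \
        $(kubectl get ValidatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') || :
    timeout 30 kubectl delete crd \
        $(kubectl get crd | grep chaos-mesh.org | awk '{print $1}') || :
    timeout 30 kubectl delete clusterrolebinding \
        $(kubectl get clusterrolebinding | grep chaos-mesh | awk '{print $1}') || :
    timeout 30 kubectl delete clusterrole \
        $(kubectl get clusterrole | grep chaos-mesh | awk '{print $1}') || :

Since no chaos-mesh release was installed in this run (the helm list filter returned nothing), every sweep runs on an empty list and the errors are harmless.
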
+ cat /tmp/tmp.ykgZ2HmhM4 + rm /tmp/tmp.xWe1syxjmS /tmp/tmp.ykgZ2HmhM4 + return 0 + deploy_minio + desc 'install Minio' + set +o xtrace ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- + helm uninstall minio-service Error: uninstall: Release not loaded: minio-service: release: not found + : + helm repo remove minio "minio" has been removed from your repositories + helm repo add minio https://charts.min.io/ "minio" has been added to your repositories + retry 10 60 helm install minio-service --version 5.0.14 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio + local max=10 + local delay=60 + shift 2 + local n=1 + helm install minio-service --version 5.0.14 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio NAME: minio-service LAST DEPLOYED: Mon Jul 8 17:00:51 2024 NAMESPACE: scheduled-backup-2969 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.scheduled-backup-2969.svc.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace scheduled-backup-2969 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. kubectl port-forward $POD_NAME 9000 --namespace scheduled-backup-2969 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace scheduled-backup-2969 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace scheduled-backup-2969 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. 
mc ls minio-service-local ++ kubectl_bin get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ahlTlOKUIF +++ mktemp ++ local LAST_ERR=/tmp/tmp.cF3exxPEdM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ahlTlOKUIF ++ cat /tmp/tmp.cF3exxPEdM ++ rm /tmp/tmp.ahlTlOKUIF /tmp/tmp.cF3exxPEdM ++ return 0 + MINIO_POD=minio-service-57dd49b-zc6zq + wait_pod minio-service-57dd49b-zc6zq + local pod=minio-service-57dd49b-zc6zq + set +o xtrace waiting for pod/minio-service-57dd49b-zc6zq to be ready.OK + '[' -n psmdb-operator ']' + kubectl_bin create svc -n psmdb-operator externalname minio-service --external-name=minio-service.scheduled-backup-2969.svc.cluster.local --tcp=9000 ++ mktemp + local LAST_OUT=/tmp/tmp.eCItq2xXTL ++ mktemp + local LAST_ERR=/tmp/tmp.hTJb3I5Joj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create svc -n psmdb-operator externalname minio-service --external-name=minio-service.scheduled-backup-2969.svc.cluster.local --tcp=9000 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eCItq2xXTL service/minio-service created + cat /tmp/tmp.hTJb3I5Joj + rm /tmp/tmp.eCItq2xXTL /tmp/tmp.hTJb3I5Joj + return 0 + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 \ /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' ++ mktemp + local LAST_OUT=/tmp/tmp.scoCNKLRgG ++ mktemp + local LAST_ERR=/tmp/tmp.FnIVFoRstB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 \ /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.scoCNKLRgG make_bucket: operator-testing pod "aws-cli" deleted + cat /tmp/tmp.FnIVFoRstB + rm /tmp/tmp.scoCNKLRgG /tmp/tmp.FnIVFoRstB + return 0 + label_node ++ kubectl_bin get nodes -o custom-columns=NAME:.metadata.name --no-headers=true ++ head -n1 +++ mktemp ++ local LAST_OUT=/tmp/tmp.aqMa1GoRfE +++ mktemp ++ local LAST_ERR=/tmp/tmp.XGY1wBe1UV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get nodes -o custom-columns=NAME:.metadata.name --no-headers=true ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.aqMa1GoRfE ++ cat /tmp/tmp.XGY1wBe1UV ++ rm /tmp/tmp.aqMa1GoRfE /tmp/tmp.XGY1wBe1UV ++ return 0 + LABELED_NODE=gke-jen-psmdb-1567-b27e0-default-pool-b4610de5-6drs + desc 'add labels' + set +o xtrace ----------------------------------------------------------------------------------- add labels ----------------------------------------------------------------------------------- + kubectl_bin label nodes gke-jen-psmdb-1567-b27e0-default-pool-b4610de5-6drs backupWorker=True --overwrite ++ mktemp + local LAST_OUT=/tmp/tmp.AXV7jCkfXT ++ mktemp + local LAST_ERR=/tmp/tmp.mVw7hVti9D + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl label nodes 
gke-jen-psmdb-1567-b27e0-default-pool-b4610de5-6drs backupWorker=True --overwrite + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AXV7jCkfXT node/gke-jen-psmdb-1567-b27e0-default-pool-b4610de5-6drs not labeled + cat /tmp/tmp.mVw7hVti9D + rm /tmp/tmp.AXV7jCkfXT /tmp/tmp.mVw7hVti9D + return 0 + desc 'create PriorityClass' + set +o xtrace ----------------------------------------------------------------------------------- create PriorityClass ----------------------------------------------------------------------------------- + kubectl apply -f - + cat - priorityclass.scheduling.k8s.io/high-priority configured + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.ydcA47oXpN ++ mktemp + local LAST_ERR=/tmp/tmp.Nwgm8X2c24 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ydcA47oXpN secret/some-users created deployment.apps/psmdb-client created + cat /tmp/tmp.Nwgm8X2c24 + rm /tmp/tmp.ydcA47oXpN /tmp/tmp.Nwgm8X2c24 + return 0 + apply_s3_storage_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.ZHzG8ARcJt ++ mktemp + local LAST_ERR=/tmp/tmp.egrJ5WOMkA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ZHzG8ARcJt secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.egrJ5WOMkA + rm /tmp/tmp.ZHzG8ARcJt /tmp/tmp.egrJ5WOMkA + return 0 + cluster=some-name-rs0 + desc 'create first PSMDB cluster some-name-rs0' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster some-name-rs0 ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/some-name-rs0.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/some-name-rs0.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/some-name-rs0.yml ++ mktemp + yq eval '(.spec | select(.image == 
null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' + yq eval '.spec.upgradeOptions.apply="Never"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' + local LAST_OUT=/tmp/tmp.CR1aAN1zxr + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1567-b27e0b5e"' ++ mktemp + local LAST_ERR=/tmp/tmp.XzJYU973Ws + local exit_status=0 + local timeout=4 + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CR1aAN1zxr perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.XzJYU973Ws + rm /tmp/tmp.CR1aAN1zxr /tmp/tmp.XzJYU973Ws + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready..........OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready........OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BPfNHgRUTi +++ mktemp ++ local LAST_ERR=/tmp/tmp.GHpiaIVURL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BPfNHgRUTi ++ cat /tmp/tmp.GHpiaIVURL ++ rm /tmp/tmp.BPfNHgRUTi /tmp/tmp.GHpiaIVURL ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready......OK ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oImlLwrSBF +++ mktemp ++ local LAST_ERR=/tmp/tmp.YJZFw9ZsgM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oImlLwrSBF ++ cat /tmp/tmp.YJZFw9ZsgM ++ rm /tmp/tmp.oImlLwrSBF /tmp/tmp.YJZFw9ZsgM ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness..... 
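
The apply_cluster step above never feeds the CR manifest to kubectl directly; cat_config pipes it through a chain of yq edits so the same conf/some-name-rs0.yml can be reused with whatever images the CI build produced. The yq expressions below are taken verbatim from the trace; only the $cr_yaml file variable is illustrative:

    # $cr_yaml stands for the test's CR manifest, e.g. conf/some-name-rs0.yml
    cat "$cr_yaml" \
        | yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' \
        | yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1567-b27e0b5e"' \
        | yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' \
        | yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' \
        | yq eval '.spec.upgradeOptions.apply="Never"' \
        | kubectl apply -f -

Pinning .spec.upgradeOptions.apply to "Never" keeps the operator's own upgrade scheduler from swapping images mid-test.
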
+ desc 'check if service and statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/some-name-rs0 + local resource=statefulset/some-name-rs0 + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/statefulset_some-name-rs0.yml + local new_result=/tmp/tmp.qLgtWfiE09/statefulset_some-name-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/statefulset_some-name-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/some-name-rs0 ++ mktemp + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("scheduled-backup-2969", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - + local LAST_OUT=/tmp/tmp.ObzHY2n4SH ++ mktemp + local LAST_ERR=/tmp/tmp.mF9mi6voUj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ObzHY2n4SH + cat /tmp/tmp.mF9mi6voUj + rm /tmp/tmp.ObzHY2n4SH /tmp/tmp.mF9mi6voUj + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.qLgtWfiE09/statefulset_some-name-rs0.yml + version_gt 1.22 ++ echo '1.26 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.qLgtWfiE09/statefulset_some-name-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.qLgtWfiE09/statefulset_some-name-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/statefulset_some-name-rs0.yml == */cronjob* ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/statefulset_some-name-rs0.yml /tmp/tmp.qLgtWfiE09/statefulset_some-name-rs0.yml + desc 'create user myApp' + set +o xtrace ----------------------------------------------------------------------------------- create user myApp ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@some-name-rs0.scheduled-backup-2969 + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@some-name-rs0.scheduled-backup-2969 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1PWHcQ3brC +++ mktemp ++ local LAST_ERR=/tmp/tmp.x4FqIFSEJY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1PWHcQ3brC ++ cat /tmp/tmp.x4FqIFSEJY ++ rm /tmp/tmp.1PWHcQ3brC /tmp/tmp.x4FqIFSEJY ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ userAdmin:userAdmin123456@some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.uBTXbD9BR8 ++ mktemp + local LAST_ERR=/tmp/tmp.hhu4rD9sGj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.uBTXbD9BR8 Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017,some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("81d84717-007e-4390-ad1d-5f8f392ad965") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.hhu4rD9sGj + rm /tmp/tmp.uBTXbD9BR8 /tmp/tmp.hhu4rD9sGj + return 0 + sleep 2 + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@some-name-rs0.scheduled-backup-2969 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@some-name-rs0.scheduled-backup-2969 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H7MAYDDr3a +++ mktemp ++ local LAST_ERR=/tmp/tmp.YO4HI4ySW0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.H7MAYDDr3a ++ cat /tmp/tmp.YO4HI4ySW0 ++ rm /tmp/tmp.H7MAYDDr3a /tmp/tmp.YO4HI4ySW0 ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.dFSIK0z5bP ++ mktemp + local LAST_ERR=/tmp/tmp.ZRK87ODxMp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dFSIK0z5bP Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("065ae380-b50f-4cd4-ae52-6381d385f683") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.ZRK87ODxMp + rm /tmp/tmp.dFSIK0z5bP /tmp/tmp.ZRK87ODxMp + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local command=find + local 
uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wG6D4pJ1nz +++ mktemp ++ local LAST_ERR=/tmp/tmp.D5DtVhdS4e ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wG6D4pJ1nz ++ cat /tmp/tmp.D5DtVhdS4e ++ rm /tmp/tmp.wG6D4pJ1nz /tmp/tmp.D5DtVhdS4e ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.xozNHS9wSw ++ mktemp + local LAST_ERR=/tmp/tmp.PjglU8JyWJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xozNHS9wSw + cat /tmp/tmp.PjglU8JyWJ + rm /tmp/tmp.xozNHS9wSw /tmp/tmp.PjglU8JyWJ + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PsrNMtu1Vv +++ mktemp ++ local LAST_ERR=/tmp/tmp.hWqyW85aAv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PsrNMtu1Vv ++ cat /tmp/tmp.hWqyW85aAv ++ rm /tmp/tmp.PsrNMtu1Vv /tmp/tmp.hWqyW85aAv ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.SUdfOsgV8r ++ mktemp + local LAST_ERR=/tmp/tmp.2dThwX8gv2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SUdfOsgV8r + cat /tmp/tmp.2dThwX8gv2 + rm /tmp/tmp.SUdfOsgV8r /tmp/tmp.2dThwX8gv2 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local LAST_OUT=/tmp/tmp.homcl40Uum +++ mktemp ++ local LAST_ERR=/tmp/tmp.CsRZuA1ENj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.homcl40Uum ++ cat /tmp/tmp.CsRZuA1ENj ++ rm /tmp/tmp.homcl40Uum /tmp/tmp.CsRZuA1ENj ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.bxxlGMxEaq ++ mktemp + local LAST_ERR=/tmp/tmp.ZV4juUduap + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bxxlGMxEaq + cat /tmp/tmp.ZV4juUduap + rm /tmp/tmp.bxxlGMxEaq /tmp/tmp.ZV4juUduap + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + wait_backup_agent some-name-rs0-0 + local agent_pod=some-name-rs0-0 + set +o xtrace some-name-rs0-0 + wait_backup_agent some-name-rs0-1 + local agent_pod=some-name-rs0-1 + set +o xtrace some-name-rs0-1 + wait_backup_agent some-name-rs0-2 + local agent_pod=some-name-rs0-2 + set +o xtrace some-name-rs0-2 + desc 'add backups schedule, wait for the first backup' + set +o xtrace ----------------------------------------------------------------------------------- add backups schedule, wait for the first backup ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/some-name-rs0-2.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/some-name-rs0-2.yml + kubectl_bin apply -f - + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' ++ mktemp + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_OUT=/tmp/tmp.wIbHvDoGAm + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1567-b27e0b5e"' ++ mktemp + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/some-name-rs0-2.yml + local LAST_ERR=/tmp/tmp.Oj3nG5KGvy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wIbHvDoGAm perconaservermongodb.psmdb.percona.com/some-name configured + cat /tmp/tmp.Oj3nG5KGvy + rm /tmp/tmp.wIbHvDoGAm /tmp/tmp.Oj3nG5KGvy + return 0 + sleep 55 + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/some-name-rs0.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/some-name-rs0.yml + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/some-name-rs0.yml ++ mktemp + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' + local LAST_OUT=/tmp/tmp.Rnc8A1mftL + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1567-b27e0b5e"' + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' ++ mktemp + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_ERR=/tmp/tmp.r5oN5WyfJr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Rnc8A1mftL perconaservermongodb.psmdb.percona.com/some-name configured + cat 
/tmp/tmp.r5oN5WyfJr + rm /tmp/tmp.Rnc8A1mftL /tmp/tmp.r5oN5WyfJr + return 0 + '[' -z '' ']' ++ kubectl_bin get psmdb-backup ++ grep aws-s3 ++ awk '{print$1}' ++ head -1 +++ mktemp ++ local LAST_OUT=/tmp/tmp.mZfGiHRgpa +++ mktemp ++ local LAST_ERR=/tmp/tmp.dywtNz2oe3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mZfGiHRgpa ++ cat /tmp/tmp.dywtNz2oe3 ++ rm /tmp/tmp.mZfGiHRgpa /tmp/tmp.dywtNz2oe3 ++ return 0 + backup_name_aws=cron-some-name-20240708170400-4nxhf ++ kubectl_bin get psmdb-backup ++ grep gcp-cs ++ awk '{print$1}' +++ mktemp ++ head -1 ++ local LAST_OUT=/tmp/tmp.A5QW1BMEiR +++ mktemp ++ local LAST_ERR=/tmp/tmp.OS8SzxqjiV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.A5QW1BMEiR ++ cat /tmp/tmp.OS8SzxqjiV ++ rm /tmp/tmp.A5QW1BMEiR /tmp/tmp.OS8SzxqjiV ++ return 0 + backup_name_gcp=cron-some-name-20240708170400-rthjw ++ kubectl_bin get psmdb-backup ++ grep azure-blob ++ head -1 +++ mktemp ++ awk '{print$1}' ++ local LAST_OUT=/tmp/tmp.j2XKdOJhh2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3aRJj0vyVy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.j2XKdOJhh2 ++ cat /tmp/tmp.3aRJj0vyVy ++ rm /tmp/tmp.j2XKdOJhh2 /tmp/tmp.3aRJj0vyVy ++ return 0 + backup_name_azure=cron-some-name-20240708170400-h8wn4 + wait_backup cron-some-name-20240708170400-4nxhf + local backup_name=cron-some-name-20240708170400-4nxhf + set +o xtrace cron-some-name-20240708170400-4nxhf..................... + wait_backup cron-some-name-20240708170400-rthjw + local backup_name=cron-some-name-20240708170400-rthjw + set +o xtrace cron-some-name-20240708170400-rthjw.................................... + wait_backup cron-some-name-20240708170400-h8wn4 + local backup_name=cron-some-name-20240708170400-h8wn4 + set +o xtrace cron-some-name-20240708170400-h8wn4. ++ kubectl_bin get psmdb-backup ++ grep minio ++ awk '{print$1}' +++ mktemp ++ head -1 ++ local LAST_OUT=/tmp/tmp.yNpLTzR3si +++ mktemp ++ local LAST_ERR=/tmp/tmp.K7SvUHcUON ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yNpLTzR3si ++ cat /tmp/tmp.K7SvUHcUON ++ rm /tmp/tmp.yNpLTzR3si /tmp/tmp.K7SvUHcUON ++ return 0 + backup_name_minio=cron-some-name-20240708170400-tvtc2 + wait_backup cron-some-name-20240708170400-tvtc2 + local backup_name=cron-some-name-20240708170400-tvtc2 + set +o xtrace cron-some-name-20240708170400-tvtc2. 
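
Because the backups are produced by the cron schedule rather than created by the test, the script discovers the generated psmdb-backup names per storage and then polls each one until it finishes. The discovery one-liner below matches the trace; the polling loop is only a sketch, since wait_backup's body is not shown in this log and the .status.state field with its "ready" value is an assumption about the psmdb-backup resource:

    # Pick the first scheduled backup that targets the minio storage.
    backup_name_minio=$(kubectl get psmdb-backup | grep minio | awk '{print $1}' | head -1)

    # Poll until it completes; the dots mirror the wait_backup progress output.
    # Field name and target value are assumptions, not taken from this log.
    until [ "$(kubectl get psmdb-backup "$backup_name_minio" \
            -o jsonpath='{.status.state}')" = "ready" ]; do
        printf '.'
        sleep 1
    done

The same discover-and-wait cycle runs four times here, once per configured storage: aws-s3, gcp-cs, azure-blob and minio.
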
+ sleep 5 + desc 'check backup and restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- minio ----------------------------------------------------------------------------------- ++ get_backup_dest cron-some-name-20240708170400-tvtc2 ++ local backup_name=cron-some-name-20240708170400-tvtc2 ++ kubectl_bin get psmdb-backup cron-some-name-20240708170400-tvtc2 -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|azure://||' ++ sed 's|s3://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eWDnWAqsXJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.RZkhWPRN4D ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup cron-some-name-20240708170400-tvtc2 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eWDnWAqsXJ ++ cat /tmp/tmp.RZkhWPRN4D ++ rm /tmp/tmp.eWDnWAqsXJ /tmp/tmp.RZkhWPRN4D ++ return 0 + backup_dest_minio=operator-testing/2024-07-08T17:04:21Z + kubectl_bin run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2024-07-08T17:04:21Z/rs0/ + grep myApp.test.gz ++ mktemp + local LAST_OUT=/tmp/tmp.TOzsDONPHR ++ mktemp + local LAST_ERR=/tmp/tmp.O2smPI4GCc + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/2024-07-08T17:04:21Z/rs0/ + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TOzsDONPHR + cat /tmp/tmp.O2smPI4GCc + rm /tmp/tmp.TOzsDONPHR /tmp/tmp.O2smPI4GCc + return 0 2024-07-08 17:04:26 55 myApp.test.gz + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.scheduled-backup-2969 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.scheduled-backup-2969 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zelGtiSEQH +++ mktemp ++ local LAST_ERR=/tmp/tmp.05XuHFJjQa ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zelGtiSEQH ++ cat /tmp/tmp.05XuHFJjQa ++ rm /tmp/tmp.zelGtiSEQH /tmp/tmp.05XuHFJjQa ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.cyigPHFV3E ++ mktemp + local LAST_ERR=/tmp/tmp.Zs3PrfSLzf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf 
'\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.cyigPHFV3E Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("47c6e595-f64c-4b63-a3be-47dde45ce620") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.Zs3PrfSLzf + rm /tmp/tmp.cyigPHFV3E /tmp/tmp.Zs3PrfSLzf + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.s3InyKUKCb +++ mktemp ++ local LAST_ERR=/tmp/tmp.z3JkYjFc0Q ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.s3InyKUKCb ++ cat /tmp/tmp.z3JkYjFc0Q ++ rm /tmp/tmp.s3InyKUKCb /tmp/tmp.z3JkYjFc0Q ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.RPYInVItKd ++ mktemp + local LAST_ERR=/tmp/tmp.RfDYDtBhi8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RPYInVItKd + cat /tmp/tmp.RfDYDtBhi8 + rm /tmp/tmp.RPYInVItKd /tmp/tmp.RfDYDtBhi8 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find-2nd.json /tmp/tmp.qLgtWfiE09/find-2nd + compare_mongo_cmd find 
myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J6EZkSnEjs +++ mktemp ++ local LAST_ERR=/tmp/tmp.pkeIbxQbPc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.J6EZkSnEjs ++ cat /tmp/tmp.pkeIbxQbPc ++ rm /tmp/tmp.J6EZkSnEjs /tmp/tmp.pkeIbxQbPc ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.gpxLIsSqRP ++ mktemp + local LAST_ERR=/tmp/tmp.HJe6ER4DTy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gpxLIsSqRP + cat /tmp/tmp.HJe6ER4DTy + rm /tmp/tmp.gpxLIsSqRP /tmp/tmp.HJe6ER4DTy + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find-2nd.json /tmp/tmp.qLgtWfiE09/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7bM0OTTYGH +++ mktemp ++ local 
LAST_ERR=/tmp/tmp.8VKFU4aIy2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7bM0OTTYGH ++ cat /tmp/tmp.8VKFU4aIy2 ++ rm /tmp/tmp.7bM0OTTYGH /tmp/tmp.8VKFU4aIy2 ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.iJzhqXhZ0n ++ mktemp + local LAST_ERR=/tmp/tmp.R6gEoAu2hl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.iJzhqXhZ0n + cat /tmp/tmp.R6gEoAu2hl + rm /tmp/tmp.iJzhqXhZ0n /tmp/tmp.R6gEoAu2hl + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find-2nd.json /tmp/tmp.qLgtWfiE09/find-2nd + run_restore cron-some-name-20240708170400-tvtc2 + local backup_name=cron-some-name-20240708170400-tvtc2 + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/restore.yml + /usr/bin/sed -e 's/name:/name: restore-cron-some-name-20240708170400-tvtc2/' + kubectl_bin apply -f - + /usr/bin/sed -e 's/backupName:/backupName: cron-some-name-20240708170400-tvtc2/' ++ mktemp + local LAST_OUT=/tmp/tmp.rRsaWmVPTd ++ mktemp + local LAST_ERR=/tmp/tmp.J9y3zaigLL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rRsaWmVPTd perconaservermongodbrestore.psmdb.percona.com/restore-cron-some-name-20240708170400-tvtc2 created + cat /tmp/tmp.J9y3zaigLL + rm /tmp/tmp.rRsaWmVPTd /tmp/tmp.J9y3zaigLL + return 0 + wait_restore cron-some-name-20240708170400-tvtc2 some-name + local backup_name=cron-some-name-20240708170400-tvtc2 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=780 + set +o xtrace waiting psmdb-restore/cron-some-name-20240708170400-tvtc2 to reach ready state............ 
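
run_restore above builds a PerconaServerMongoDBRestore manifest by patching two placeholder fields in conf/restore.yml with sed and piping the result straight to kubectl apply. A minimal sketch of that pattern; the template shown in the comments is an assumption reconstructed from the substitutions visible in the trace (the repository's real restore.yml may carry additional fields):

# Assumed shape of restore.yml -- 'name:' and 'backupName:' are left empty on purpose:
#   apiVersion: psmdb.percona.com/v1
#   kind: PerconaServerMongoDBRestore
#   metadata:
#     name:
#   spec:
#     clusterName: some-name
#     backupName:

run_restore() {
    local backup_name="$1"
    cat restore.yml \
        | /usr/bin/sed -e "s/name:/name: restore-${backup_name}/" \
        | /usr/bin/sed -e "s/backupName:/backupName: ${backup_name}/" \
        | kubectl apply -f -
}

Note that the first substitution only matches the lowercase 'name:' under metadata; 'clusterName:' and 'backupName:' are untouched by it because sed matching is case sensitive, which is what makes this two-sed pipeline safe.
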
+ '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sQoZoQvBXk +++ mktemp ++ local LAST_ERR=/tmp/tmp.YT4aoTRP9S ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.sQoZoQvBXk ++ cat /tmp/tmp.YT4aoTRP9S ++ rm /tmp/tmp.sQoZoQvBXk /tmp/tmp.YT4aoTRP9S ++ return 0 + [[ ready == \r\e\a\d\y ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jfR5ENCfsR +++ mktemp ++ local LAST_ERR=/tmp/tmp.kUB71nbQ20 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.jfR5ENCfsR ++ cat /tmp/tmp.kUB71nbQ20 ++ rm /tmp/tmp.jfR5ENCfsR /tmp/tmp.kUB71nbQ20 ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.3FOPT8tuX0 ++ mktemp + local LAST_ERR=/tmp/tmp.QFmCytffmw + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3FOPT8tuX0 + cat /tmp/tmp.QFmCytffmw + rm /tmp/tmp.3FOPT8tuX0 /tmp/tmp.QFmCytffmw + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' 
myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CYQVpzbOJD +++ mktemp ++ local LAST_ERR=/tmp/tmp.n6rcONRI2d ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CYQVpzbOJD ++ cat /tmp/tmp.n6rcONRI2d ++ rm /tmp/tmp.CYQVpzbOJD /tmp/tmp.n6rcONRI2d ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.YpmHFGSEnI ++ mktemp + local LAST_ERR=/tmp/tmp.7eF5fHhwq2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YpmHFGSEnI + cat /tmp/tmp.7eF5fHhwq2 + rm /tmp/tmp.YpmHFGSEnI /tmp/tmp.7eF5fHhwq2 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.U1Ys0G5rTz +++ mktemp ++ local LAST_ERR=/tmp/tmp.VS8gDJkVgp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.U1Ys0G5rTz ++ cat /tmp/tmp.VS8gDJkVgp ++ rm 
/tmp/tmp.U1Ys0G5rTz /tmp/tmp.VS8gDJkVgp ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.55WstzB9uY ++ mktemp + local LAST_ERR=/tmp/tmp.jS0G1ua13O + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.55WstzB9uY + cat /tmp/tmp.jS0G1ua13O + rm /tmp/tmp.55WstzB9uY /tmp/tmp.jS0G1ua13O + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + '[' -z '' ']' + desc 'check backup and restore -- aws-s3' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- aws-s3 ----------------------------------------------------------------------------------- ++ get_backup_dest cron-some-name-20240708170400-4nxhf ++ local backup_name=cron-some-name-20240708170400-4nxhf ++ kubectl_bin get psmdb-backup cron-some-name-20240708170400-4nxhf -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' +++ mktemp ++ sed 's|azure://||' ++ local LAST_OUT=/tmp/tmp.9UxGDB0SzF +++ mktemp ++ local LAST_ERR=/tmp/tmp.wgQN5DA1NY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup cron-some-name-20240708170400-4nxhf -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9UxGDB0SzF ++ cat /tmp/tmp.wgQN5DA1NY ++ rm /tmp/tmp.9UxGDB0SzF /tmp/tmp.wgQN5DA1NY ++ return 0 + backup_dest_aws=operator-testing/psmdb-scheduled-backup/2024-07-08T17:04:53Z + curl -s https://s3.amazonaws.com/operator-testing/psmdb-scheduled-backup/2024-07-08T17:04:53Z/rs0/myApp.test.gz + gunzip + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.scheduled-backup-2969 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.scheduled-backup-2969 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Rqkdgob2Hf +++ mktemp ++ local LAST_ERR=/tmp/tmp.LdNIOuCEUV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Rqkdgob2Hf ++ cat /tmp/tmp.LdNIOuCEUV ++ rm /tmp/tmp.Rqkdgob2Hf /tmp/tmp.LdNIOuCEUV ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo 
mongodb+srv://myApp:myPass@some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.f6wuUwSRQ2 ++ mktemp + local LAST_ERR=/tmp/tmp.EZ1rwqzF9M + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.f6wuUwSRQ2 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("55b0a8dc-b032-4401-8530-1eb7a00c6ff5") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.EZ1rwqzF9M + rm /tmp/tmp.f6wuUwSRQ2 /tmp/tmp.EZ1rwqzF9M + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gUA83sKidv +++ mktemp ++ local LAST_ERR=/tmp/tmp.OzB2Fha7RU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gUA83sKidv ++ cat /tmp/tmp.OzB2Fha7RU ++ rm /tmp/tmp.gUA83sKidv /tmp/tmp.OzB2Fha7RU ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.jTeWPNoe6k ++ mktemp + local LAST_ERR=/tmp/tmp.VARiOn3Kom + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + 
exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jTeWPNoe6k + cat /tmp/tmp.VARiOn3Kom + rm /tmp/tmp.jTeWPNoe6k /tmp/tmp.VARiOn3Kom + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find-2nd.json /tmp/tmp.qLgtWfiE09/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NltFMGVzGb +++ mktemp ++ local LAST_ERR=/tmp/tmp.FLGq1bB5Ji ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NltFMGVzGb ++ cat /tmp/tmp.FLGq1bB5Ji ++ rm /tmp/tmp.NltFMGVzGb /tmp/tmp.FLGq1bB5Ji ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.YC5BrGQZIY ++ mktemp + local LAST_ERR=/tmp/tmp.Sl2fjBuJBh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YC5BrGQZIY + cat /tmp/tmp.Sl2fjBuJBh + rm /tmp/tmp.YC5BrGQZIY /tmp/tmp.Sl2fjBuJBh + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find-2nd.json /tmp/tmp.qLgtWfiE09/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp\n db.test.find()' + local 
uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TiDFtUFysd +++ mktemp ++ local LAST_ERR=/tmp/tmp.X50Ry5ISL3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.TiDFtUFysd ++ cat /tmp/tmp.X50Ry5ISL3 ++ rm /tmp/tmp.TiDFtUFysd /tmp/tmp.X50Ry5ISL3 ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.MseegS8Bzl ++ mktemp + local LAST_ERR=/tmp/tmp.9agqMY118K + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MseegS8Bzl + cat /tmp/tmp.9agqMY118K + rm /tmp/tmp.MseegS8Bzl /tmp/tmp.9agqMY118K + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find-2nd.json /tmp/tmp.qLgtWfiE09/find-2nd + run_restore cron-some-name-20240708170400-4nxhf + local backup_name=cron-some-name-20240708170400-4nxhf + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/restore.yml + /usr/bin/sed -e 's/backupName:/backupName: cron-some-name-20240708170400-4nxhf/' + kubectl_bin apply -f - + /usr/bin/sed -e 's/name:/name: restore-cron-some-name-20240708170400-4nxhf/' ++ mktemp + local LAST_OUT=/tmp/tmp.t0ss8Mfq4i ++ mktemp + local LAST_ERR=/tmp/tmp.PoDLztBgcp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.t0ss8Mfq4i perconaservermongodbrestore.psmdb.percona.com/restore-cron-some-name-20240708170400-4nxhf created + cat /tmp/tmp.PoDLztBgcp + rm /tmp/tmp.t0ss8Mfq4i /tmp/tmp.PoDLztBgcp + return 0 + wait_restore cron-some-name-20240708170400-4nxhf some-name + local backup_name=cron-some-name-20240708170400-4nxhf + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=780 + set +o xtrace waiting psmdb-restore/cron-some-name-20240708170400-4nxhf to reach ready state................... 
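
wait_restore is likewise traced only as far as its argument handling: the locals it sets (target_state=ready, wait_cluster_consistency=1, wait_time=780) are visible, but the loop body is hidden by 'set +o xtrace'. A minimal sketch consistent with those locals and with the dotted progress line in the log; the one-second poll interval, the psmdb-restore short name, and the 'restore-' prefix (taken from run_restore above) are assumptions:

wait_restore() {
    local backup_name="$1"
    local cluster_name="$2"
    local target_state="${3:-ready}"
    local wait_time="${4:-780}"
    local elapsed=0
    echo -n "waiting psmdb-restore/${backup_name} to reach ${target_state} state"
    # Poll the restore object created by run_restore until it hits the target state.
    until [ "$(kubectl get psmdb-restore "restore-${backup_name}" -o jsonpath='{.status.state}')" == "$target_state" ]; do
        echo -n .
        elapsed=$((elapsed + 1))
        if [ "$elapsed" -ge "$wait_time" ]; then
            echo "restore-${backup_name} did not reach ${target_state} in ${wait_time}s" >&2
            return 1
        fi
        sleep 1
    done
    echo
}

Because wait_cluster_consistency=1, the caller follows up with a cluster-level readiness check, which is the '[ 1 -eq 1 ]' branch that opens the next trace block.
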
+ '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JndDDN9UPP +++ mktemp ++ local LAST_ERR=/tmp/tmp.z90l1M8fgO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.JndDDN9UPP ++ cat /tmp/tmp.z90l1M8fgO ++ rm /tmp/tmp.JndDDN9UPP /tmp/tmp.z90l1M8fgO ++ return 0 + [[ ready == \r\e\a\d\y ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WeT2OPtYd6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4P2TUlvnEW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WeT2OPtYd6 ++ cat /tmp/tmp.4P2TUlvnEW ++ rm /tmp/tmp.WeT2OPtYd6 /tmp/tmp.4P2TUlvnEW ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.kvBpmCdCLT ++ mktemp + local LAST_ERR=/tmp/tmp.idayjNW40z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kvBpmCdCLT + cat /tmp/tmp.idayjNW40z + rm /tmp/tmp.kvBpmCdCLT /tmp/tmp.idayjNW40z + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' 
myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FJMzLmf7YL +++ mktemp ++ local LAST_ERR=/tmp/tmp.8XDCvdIyxp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FJMzLmf7YL ++ cat /tmp/tmp.8XDCvdIyxp ++ rm /tmp/tmp.FJMzLmf7YL /tmp/tmp.8XDCvdIyxp ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.idCTvPHKXT ++ mktemp + local LAST_ERR=/tmp/tmp.YJ8nmFtEa9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.idCTvPHKXT + cat /tmp/tmp.YJ8nmFtEa9 + rm /tmp/tmp.idCTvPHKXT /tmp/tmp.YJ8nmFtEa9 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SFJlCJk2jZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.oX2kLzezZ4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SFJlCJk2jZ ++ cat /tmp/tmp.oX2kLzezZ4 ++ rm 
/tmp/tmp.SFJlCJk2jZ /tmp/tmp.oX2kLzezZ4 ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.bZxiUR7oeG ++ mktemp + local LAST_ERR=/tmp/tmp.MDIXKjnKMk + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bZxiUR7oeG + cat /tmp/tmp.MDIXKjnKMk + rm /tmp/tmp.bZxiUR7oeG /tmp/tmp.MDIXKjnKMk + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + desc 'check backup and restore -- gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- gcp-cs ----------------------------------------------------------------------------------- ++ get_backup_dest cron-some-name-20240708170400-rthjw ++ local backup_name=cron-some-name-20240708170400-rthjw ++ kubectl_bin get psmdb-backup cron-some-name-20240708170400-rthjw -o 'jsonpath={.status.destination}' ++ sed -e 's/.json$//' ++ sed 's|s3://||' ++ sed 's|azure://||' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rd7kZdnRHJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.DomH912Fe8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup cron-some-name-20240708170400-rthjw -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rd7kZdnRHJ ++ cat /tmp/tmp.DomH912Fe8 ++ rm /tmp/tmp.rd7kZdnRHJ /tmp/tmp.DomH912Fe8 ++ return 0 + backup_dest_gcp=operator-testing/psmdb-scheduled-backup/2024-07-08T17:05:56Z + curl -s https://storage.googleapis.com/operator-testing/psmdb-scheduled-backup/2024-07-08T17:05:56Z/rs0/myApp.test.gz + gunzip + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.scheduled-backup-2969 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.scheduled-backup-2969 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NxL9er6XjK +++ mktemp ++ local LAST_ERR=/tmp/tmp.zzNoNOIasx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NxL9er6XjK ++ cat /tmp/tmp.zzNoNOIasx ++ rm /tmp/tmp.NxL9er6XjK /tmp/tmp.zzNoNOIasx ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo 
mongodb+srv://myApp:myPass@some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.CpKFt1Lnvr ++ mktemp + local LAST_ERR=/tmp/tmp.3ytztMTby1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CpKFt1Lnvr Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("c13953bb-212a-436d-8774-59dacb4437ae") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.3ytztMTby1 + rm /tmp/tmp.CpKFt1Lnvr /tmp/tmp.3ytztMTby1 + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local LAST_OUT=/tmp/tmp.ToAjFCR6xk +++ mktemp ++ local LAST_ERR=/tmp/tmp.4RHRsKBLpE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ToAjFCR6xk ++ cat /tmp/tmp.4RHRsKBLpE ++ rm /tmp/tmp.ToAjFCR6xk /tmp/tmp.4RHRsKBLpE ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.lZX5hhRWMN ++ mktemp + local LAST_ERR=/tmp/tmp.zqZsAgjcY4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + 
exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lZX5hhRWMN + cat /tmp/tmp.zqZsAgjcY4 + rm /tmp/tmp.lZX5hhRWMN /tmp/tmp.zqZsAgjcY4 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find-2nd.json /tmp/tmp.qLgtWfiE09/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dt8v9ZR7hl +++ mktemp ++ local LAST_ERR=/tmp/tmp.WBMPSqw5ZV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dt8v9ZR7hl ++ cat /tmp/tmp.WBMPSqw5ZV ++ rm /tmp/tmp.dt8v9ZR7hl /tmp/tmp.WBMPSqw5ZV ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Cgi8Ye1v6M ++ mktemp + local LAST_ERR=/tmp/tmp.QU5JYD9ebK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Cgi8Ye1v6M + cat /tmp/tmp.QU5JYD9ebK + rm /tmp/tmp.Cgi8Ye1v6M /tmp/tmp.QU5JYD9ebK + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find-2nd.json /tmp/tmp.qLgtWfiE09/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history 
file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VS9BOU22MD +++ mktemp ++ local LAST_ERR=/tmp/tmp.BabMI1ZvM2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VS9BOU22MD ++ cat /tmp/tmp.BabMI1ZvM2 ++ rm /tmp/tmp.VS9BOU22MD /tmp/tmp.BabMI1ZvM2 ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.rKyKv7vAeO ++ mktemp + local LAST_ERR=/tmp/tmp.2yhRMrQOzs + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rKyKv7vAeO + cat /tmp/tmp.2yhRMrQOzs + rm /tmp/tmp.rKyKv7vAeO /tmp/tmp.2yhRMrQOzs + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find-2nd.json /tmp/tmp.qLgtWfiE09/find-2nd + run_restore cron-some-name-20240708170400-rthjw + local backup_name=cron-some-name-20240708170400-rthjw + /usr/bin/sed -e 's/name:/name: restore-cron-some-name-20240708170400-rthjw/' + /usr/bin/sed -e 's/backupName:/backupName: cron-some-name-20240708170400-rthjw/' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/restore.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.6o6tDgSqqU ++ mktemp + local LAST_ERR=/tmp/tmp.Xx8uBI7fw0 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6o6tDgSqqU perconaservermongodbrestore.psmdb.percona.com/restore-cron-some-name-20240708170400-rthjw created + cat /tmp/tmp.Xx8uBI7fw0 + rm /tmp/tmp.6o6tDgSqqU /tmp/tmp.Xx8uBI7fw0 + return 0 + wait_restore cron-some-name-20240708170400-rthjw some-name + local backup_name=cron-some-name-20240708170400-rthjw + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=780 + set +o xtrace waiting psmdb-restore/cron-some-name-20240708170400-rthjw to reach ready state.............. 
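
After each restore, wait_cluster_consistency re-polls the psmdb custom resource itself until .status.state reports ready again, as the next trace block shows ('kubectl get psmdb some-name -o jsonpath={.status.state}'). A minimal sketch built around the locals visible in the trace (wait_time=32, retry=0, the initial sleep 7); the per-iteration sleep and the failure handling are assumptions:

wait_cluster_consistency() {
    local cluster_name="$1"
    local wait_time="${2:-32}"
    local retry=0
    sleep 7   # let the operator observe the finished restore first
    echo -n 'waiting for cluster readyness'   # message text as it appears in the trace
    until [ "$(kubectl get psmdb "$cluster_name" -o jsonpath='{.status.state}')" == "ready" ]; do
        echo -n .
        retry=$((retry + 1))
        if [ "$retry" -ge "$wait_time" ]; then
            echo "cluster ${cluster_name} never reached ready state" >&2
            return 1
        fi
        sleep 10
    done
}

In this run the cluster is already ready on the first probe, so the trace goes straight from the echo to the '[[ ready == \r\e\a\d\y ]]' comparison.
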
+ '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yIYliCMVpt +++ mktemp ++ local LAST_ERR=/tmp/tmp.r7BzWa9DUv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yIYliCMVpt ++ cat /tmp/tmp.r7BzWa9DUv ++ rm /tmp/tmp.yIYliCMVpt /tmp/tmp.r7BzWa9DUv ++ return 0 + [[ ready == \r\e\a\d\y ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tnfJ7FIpex +++ mktemp ++ local LAST_ERR=/tmp/tmp.b6iBcC9JAi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.tnfJ7FIpex ++ cat /tmp/tmp.b6iBcC9JAi ++ rm /tmp/tmp.tnfJ7FIpex /tmp/tmp.b6iBcC9JAi ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.ajStdYzq9I ++ mktemp + local LAST_ERR=/tmp/tmp.Vzol9T6RKO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ajStdYzq9I + cat /tmp/tmp.Vzol9T6RKO + rm /tmp/tmp.ajStdYzq9I /tmp/tmp.Vzol9T6RKO + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' 
myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L5q5wiiFMv +++ mktemp ++ local LAST_ERR=/tmp/tmp.pxtbTg9KWG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.L5q5wiiFMv ++ cat /tmp/tmp.pxtbTg9KWG ++ rm /tmp/tmp.L5q5wiiFMv /tmp/tmp.pxtbTg9KWG ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Uh9Q7p9Oxh ++ mktemp + local LAST_ERR=/tmp/tmp.3G3x6tZz5H + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Uh9Q7p9Oxh + cat /tmp/tmp.3G3x6tZz5H + rm /tmp/tmp.Uh9Q7p9Oxh /tmp/tmp.3G3x6tZz5H + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8Y2Ce7IO78 +++ mktemp ++ local LAST_ERR=/tmp/tmp.0CF0IELDfL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8Y2Ce7IO78 ++ cat /tmp/tmp.0CF0IELDfL ++ rm 
/tmp/tmp.8Y2Ce7IO78 /tmp/tmp.0CF0IELDfL ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.jiDRtFIP8w ++ mktemp + local LAST_ERR=/tmp/tmp.IGxJT3DuYj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jiDRtFIP8w + cat /tmp/tmp.IGxJT3DuYj + rm /tmp/tmp.jiDRtFIP8w /tmp/tmp.IGxJT3DuYj + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + desc 'check backup and restore -- azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- azure-blob ----------------------------------------------------------------------------------- ++ get_backup_dest cron-some-name-20240708170400-h8wn4 ++ local backup_name=cron-some-name-20240708170400-h8wn4 ++ kubectl_bin get psmdb-backup cron-some-name-20240708170400-h8wn4 -o 'jsonpath={.status.destination}' ++ sed 's|s3://||' ++ sed 's|azure://||' ++ sed -e 's/.json$//' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3vTlR0ysPm +++ mktemp ++ local LAST_ERR=/tmp/tmp.MwvUCMnf0g ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup cron-some-name-20240708170400-h8wn4 -o 'jsonpath={.status.destination}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3vTlR0ysPm ++ cat /tmp/tmp.MwvUCMnf0g ++ rm /tmp/tmp.3vTlR0ysPm /tmp/tmp.MwvUCMnf0g ++ return 0 + backup_dest_azure=operator-testing/psmdb-scheduled-backup/2024-07-08T17:05:24Z + gunzip + curl -s https://engk8soperators.blob.core.windows.net/operator-testing/psmdb-scheduled-backup/2024-07-08T17:05:24Z/rs0/myApp.test.gz + run_mongo 'use myApp\n db.test.insert({ x: 100501 })' myApp:myPass@some-name-rs0.scheduled-backup-2969 + local 'command=use myApp\n db.test.insert({ x: 100501 })' + local uri=myApp:myPass@some-name-rs0.scheduled-backup-2969 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FfAI9F3myv +++ mktemp ++ local LAST_ERR=/tmp/tmp.GIzS8JWi9J ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FfAI9F3myv ++ cat /tmp/tmp.GIzS8JWi9J ++ rm /tmp/tmp.FfAI9F3myv /tmp/tmp.GIzS8JWi9J ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo 
mongodb+srv://myApp:myPass@some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.SPNVwgB4jP ++ mktemp + local LAST_ERR=/tmp/tmp.zJYJ1PZEhf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100501 })\n'\'' | mongo mongodb+srv://myApp:myPass@some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SPNVwgB4jP Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017,some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017,some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("d3e8a9d7-3eb8-42c1-811c-d7b1631446bc") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.zJYJ1PZEhf + rm /tmp/tmp.SPNVwgB4jP /tmp/tmp.zJYJ1PZEhf + return 0 + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BsVtS43e2T +++ mktemp ++ local LAST_ERR=/tmp/tmp.fJaA92CDFU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BsVtS43e2T ++ cat /tmp/tmp.fJaA92CDFU ++ rm /tmp/tmp.BsVtS43e2T /tmp/tmp.fJaA92CDFU ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.LLrpqH8n1K ++ mktemp + local LAST_ERR=/tmp/tmp.w3ezeK73HM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + 
exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LLrpqH8n1K + cat /tmp/tmp.w3ezeK73HM + rm /tmp/tmp.LLrpqH8n1K /tmp/tmp.w3ezeK73HM + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find-2nd.json /tmp/tmp.qLgtWfiE09/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yYavPWfi9R +++ mktemp ++ local LAST_ERR=/tmp/tmp.lvuTiaInYx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yYavPWfi9R ++ cat /tmp/tmp.lvuTiaInYx ++ rm /tmp/tmp.yYavPWfi9R /tmp/tmp.lvuTiaInYx ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.BQLb0MTwN8 ++ mktemp + local LAST_ERR=/tmp/tmp.ZTUiD6COpy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.BQLb0MTwN8 + cat /tmp/tmp.ZTUiD6COpy + rm /tmp/tmp.BQLb0MTwN8 /tmp/tmp.ZTUiD6COpy + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find-2nd.json /tmp/tmp.qLgtWfiE09/find-2nd + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 -2nd + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local postfix=-2nd + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SNysZTWqgF +++ mktemp ++ local LAST_ERR=/tmp/tmp.07jorgCJol ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SNysZTWqgF ++ cat /tmp/tmp.07jorgCJol ++ rm /tmp/tmp.SNysZTWqgF /tmp/tmp.07jorgCJol ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.qmj90yLU4p ++ mktemp + local LAST_ERR=/tmp/tmp.9lzx4UDx4I + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.qmj90yLU4p + cat /tmp/tmp.9lzx4UDx4I + rm /tmp/tmp.qmj90yLU4p /tmp/tmp.9lzx4UDx4I + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find-2nd.json /tmp/tmp.qLgtWfiE09/find-2nd + run_restore cron-some-name-20240708170400-h8wn4 + local backup_name=cron-some-name-20240708170400-h8wn4 + /usr/bin/sed -e 's/name:/name: restore-cron-some-name-20240708170400-h8wn4/' + /usr/bin/sed -e 's/backupName:/backupName: cron-some-name-20240708170400-h8wn4/' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/restore.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.ijOcLV17Dg ++ mktemp + local LAST_ERR=/tmp/tmp.JFwMZGNsQ2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ijOcLV17Dg perconaservermongodbrestore.psmdb.percona.com/restore-cron-some-name-20240708170400-h8wn4 created + cat /tmp/tmp.JFwMZGNsQ2 + rm /tmp/tmp.ijOcLV17Dg /tmp/tmp.JFwMZGNsQ2 + return 0 + wait_restore cron-some-name-20240708170400-h8wn4 some-name + local backup_name=cron-some-name-20240708170400-h8wn4 + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=780 + set +o xtrace waiting psmdb-restore/cron-some-name-20240708170400-h8wn4 to reach ready state.............. 
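Note: the azure-blob check earlier in this step verifies the artifact out-of-band before any restore is attempted: it reads the backup destination from the PerconaServerMongoDBBackup status, strips the URI scheme, and pulls the dumped collection straight from blob storage. A condensed sketch under the same assumptions (backup name, storage host, and paths copied from the trace):

    # Resolve where the scheduled backup landed ...
    dest=$(kubectl get psmdb-backup cron-some-name-20240708170400-h8wn4 \
        -o 'jsonpath={.status.destination}' | sed -e 's|s3://||' -e 's|azure://||')

    # ... then spot-check that the uploaded dump exists and gunzips cleanly.
    curl -s "https://engk8soperators.blob.core.windows.net/${dest}/rs0/myApp.test.gz" | gunzip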
+ '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readiness' waiting for cluster readiness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qlGDSSXsKq +++ mktemp ++ local LAST_ERR=/tmp/tmp.BluxplDySX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.qlGDSSXsKq ++ cat /tmp/tmp.BluxplDySX ++ rm /tmp/tmp.qlGDSSXsKq /tmp/tmp.BluxplDySX ++ return 0 + [[ ready == \r\e\a\d\y ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IfsPsmISQL +++ mktemp ++ local LAST_ERR=/tmp/tmp.a51LEgFgXj ++ local exit_status=0 ++ local timeout=4 + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IfsPsmISQL ++ cat /tmp/tmp.a51LEgFgXj ++ rm /tmp/tmp.IfsPsmISQL /tmp/tmp.a51LEgFgXj ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.81I6ST7yvd ++ mktemp + local LAST_ERR=/tmp/tmp.8XScip3CkI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.81I6ST7yvd + cat /tmp/tmp.8XScip3CkI + rm /tmp/tmp.81I6ST7yvd /tmp/tmp.8XScip3CkI + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' 
myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KRwBxkj3eW +++ mktemp ++ local LAST_ERR=/tmp/tmp.5R1VwzLc6N ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.KRwBxkj3eW ++ cat /tmp/tmp.5R1VwzLc6N ++ rm /tmp/tmp.KRwBxkj3eW /tmp/tmp.5R1VwzLc6N ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.pGOaKtD9Yg ++ mktemp + local LAST_ERR=/tmp/tmp.mm2gyX2gTb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.pGOaKtD9Yg + cat /tmp/tmp.mm2gyX2gTb + rm /tmp/tmp.pGOaKtD9Yg /tmp/tmp.mm2gyX2gTb + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HRN4O3TS1y +++ mktemp ++ local LAST_ERR=/tmp/tmp.1PeSO7GQFj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.HRN4O3TS1y ++ cat /tmp/tmp.1PeSO7GQFj ++ rm 
/tmp/tmp.HRN4O3TS1y /tmp/tmp.1PeSO7GQFj ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.ocYYy0CQF9 ++ mktemp + local LAST_ERR=/tmp/tmp.v77iOe6OKQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ocYYy0CQF9 + cat /tmp/tmp.v77iOe6OKQ + rm /tmp/tmp.ocYYy0CQF9 /tmp/tmp.v77iOe6OKQ + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + desc 'add physical backup schedule, wait for the first backup' + set +o xtrace ----------------------------------------------------------------------------------- add physical backup schedule, wait for the first backup ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/some-name-rs0-3.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/some-name-rs0-3.yml + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/some-name-rs0-3.yml ++ mktemp + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' + local LAST_OUT=/tmp/tmp.9ZRuZkAKlH + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1567-b27e0b5e"' + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' ++ mktemp + local LAST_ERR=/tmp/tmp.1qHYutWM8w + local exit_status=0 + local timeout=4 + yq eval '.spec.upgradeOptions.apply="Never"' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9ZRuZkAKlH perconaservermongodb.psmdb.percona.com/some-name configured + cat /tmp/tmp.1qHYutWM8w + rm /tmp/tmp.9ZRuZkAKlH /tmp/tmp.1qHYutWM8w + return 0 + sleep 55 + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/some-name-rs0.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/some-name-rs0.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/some-name-rs0.yml + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' ++ mktemp + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1567-b27e0b5e"' + yq eval '(.spec | 
select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_OUT=/tmp/tmp.HJAwA2Zcc4 ++ mktemp + local LAST_ERR=/tmp/tmp.p7OTHpSzbL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.HJAwA2Zcc4 perconaservermongodb.psmdb.percona.com/some-name configured + cat /tmp/tmp.p7OTHpSzbL + rm /tmp/tmp.HJAwA2Zcc4 /tmp/tmp.p7OTHpSzbL + return 0 ++ kubectl_bin get psmdb-backup ++ grep minio ++ awk '{print $1}' +++ mktemp ++ tail -1 ++ local LAST_OUT=/tmp/tmp.5VAZKeeGvm +++ mktemp ++ local LAST_ERR=/tmp/tmp.n6Ls5DlZbX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb-backup ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5VAZKeeGvm ++ cat /tmp/tmp.n6Ls5DlZbX ++ rm /tmp/tmp.5VAZKeeGvm /tmp/tmp.n6Ls5DlZbX ++ return 0 + backup_name_minio=cron-some-name-20240708171100-6j6kr + wait_backup cron-some-name-20240708171100-6j6kr + local backup_name=cron-some-name-20240708171100-6j6kr + set +o xtrace cron-some-name-20240708171100-6j6kr................. + sleep 5 + desc 'check backup and restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- minio ----------------------------------------------------------------------------------- + run_restore cron-some-name-20240708171100-6j6kr 3 + local backup_name=cron-some-name-20240708171100-6j6kr + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/conf/restore.yml + /usr/bin/sed -e 's/name:/name: restore-cron-some-name-20240708171100-6j6kr/' + kubectl_bin apply -f - + /usr/bin/sed -e 's/backupName:/backupName: cron-some-name-20240708171100-6j6kr/' ++ mktemp + local LAST_OUT=/tmp/tmp.DnIAfUV7Hv ++ mktemp + local LAST_ERR=/tmp/tmp.rD2gzKwXOf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DnIAfUV7Hv perconaservermongodbrestore.psmdb.percona.com/restore-cron-some-name-20240708171100-6j6kr created + cat /tmp/tmp.rD2gzKwXOf + rm /tmp/tmp.DnIAfUV7Hv /tmp/tmp.rD2gzKwXOf + return 0 + wait_restore cron-some-name-20240708171100-6j6kr some-name + local backup_name=cron-some-name-20240708171100-6j6kr + local cluster_name=some-name + local target_state=ready + local wait_cluster_consistency=1 + local wait_time=780 + set +o xtrace waiting psmdb-restore/cron-some-name-20240708171100-6j6kr to reach ready state............................................................................................................................................... 
+ '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readiness' waiting for cluster readiness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3skYC09viZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.lUxNF07UDN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3skYC09viZ ++ cat /tmp/tmp.lUxNF07UDN ++ rm /tmp/tmp.3skYC09viZ /tmp/tmp.lUxNF07UDN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.woGnvFouo5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.GkOxc0D562 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.woGnvFouo5 ++ cat /tmp/tmp.GkOxc0D562 ++ rm /tmp/tmp.woGnvFouo5 /tmp/tmp.GkOxc0D562 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6qgMOI6ZCe +++ mktemp ++ local LAST_ERR=/tmp/tmp.fnNGL3LuYi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6qgMOI6ZCe ++ cat /tmp/tmp.fnNGL3LuYi ++ rm /tmp/tmp.6qgMOI6ZCe /tmp/tmp.fnNGL3LuYi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kg8WrnN7JK +++ mktemp ++ local LAST_ERR=/tmp/tmp.hRo7fTJk1U ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kg8WrnN7JK ++ cat /tmp/tmp.hRo7fTJk1U ++ rm /tmp/tmp.kg8WrnN7JK /tmp/tmp.hRo7fTJk1U ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9cIGOnkzzs +++ mktemp ++ local LAST_ERR=/tmp/tmp.b1gNCerWSD ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.9cIGOnkzzs ++ cat /tmp/tmp.b1gNCerWSD ++ rm /tmp/tmp.9cIGOnkzzs /tmp/tmp.b1gNCerWSD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Aq30uTZh93 +++ mktemp ++ local LAST_ERR=/tmp/tmp.kWQmNNXCgY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Aq30uTZh93 ++ cat /tmp/tmp.kWQmNNXCgY ++ rm /tmp/tmp.Aq30uTZh93 /tmp/tmp.kWQmNNXCgY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c1ZnB2yl9I +++ mktemp ++ local LAST_ERR=/tmp/tmp.C04CkWVdt3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.c1ZnB2yl9I ++ cat /tmp/tmp.C04CkWVdt3 ++ rm /tmp/tmp.c1ZnB2yl9I /tmp/tmp.C04CkWVdt3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S66h2MI9UK +++ mktemp ++ local LAST_ERR=/tmp/tmp.GSzItDJDk8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.S66h2MI9UK ++ cat /tmp/tmp.GSzItDJDk8 ++ rm /tmp/tmp.S66h2MI9UK /tmp/tmp.GSzItDJDk8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2wbPnnqAzg +++ mktemp ++ local LAST_ERR=/tmp/tmp.tbsfYfvwp9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2wbPnnqAzg ++ cat /tmp/tmp.tbsfYfvwp9 ++ rm /tmp/tmp.2wbPnnqAzg /tmp/tmp.tbsfYfvwp9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 9 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I5IB8alQ01 +++ mktemp ++ local LAST_ERR=/tmp/tmp.mJL78NYIlq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.I5IB8alQ01 ++ cat /tmp/tmp.mJL78NYIlq ++ rm /tmp/tmp.I5IB8alQ01 /tmp/tmp.mJL78NYIlq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 10 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ebjJU7gdAj +++ mktemp ++ local LAST_ERR=/tmp/tmp.JoInt94QoZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ebjJU7gdAj ++ cat /tmp/tmp.JoInt94QoZ ++ rm /tmp/tmp.ebjJU7gdAj /tmp/tmp.JoInt94QoZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 11 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.be2AwB5a8E +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y673k1TrtM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.be2AwB5a8E ++ cat /tmp/tmp.Y673k1TrtM ++ rm /tmp/tmp.be2AwB5a8E /tmp/tmp.Y673k1TrtM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 12 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7uwzoPrW1l +++ mktemp ++ local LAST_ERR=/tmp/tmp.kkFOGxBOox ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7uwzoPrW1l ++ cat /tmp/tmp.kkFOGxBOox ++ rm /tmp/tmp.7uwzoPrW1l /tmp/tmp.kkFOGxBOox ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 13 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y029RE9O7t +++ mktemp ++ local LAST_ERR=/tmp/tmp.KTpJj0gs8O ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.y029RE9O7t ++ cat /tmp/tmp.KTpJj0gs8O ++ rm /tmp/tmp.y029RE9O7t /tmp/tmp.KTpJj0gs8O ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 14 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PumlCGyMje +++ mktemp ++ local LAST_ERR=/tmp/tmp.QR49wLTcCO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PumlCGyMje ++ cat /tmp/tmp.QR49wLTcCO ++ rm /tmp/tmp.PumlCGyMje /tmp/tmp.QR49wLTcCO ++ return 0 + [[ ready == \r\e\a\d\y ]] + compare_mongo_cmd find myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Lt1GD4GluS +++ mktemp ++ local LAST_ERR=/tmp/tmp.Mztq762JSt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ 
break ++ cat /tmp/tmp.Lt1GD4GluS ++ cat /tmp/tmp.Mztq762JSt ++ rm /tmp/tmp.Lt1GD4GluS /tmp/tmp.Mztq762JSt ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.RwPZDZ9fMT ++ mktemp + local LAST_ERR=/tmp/tmp.C305e57ORx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-0.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.RwPZDZ9fMT + cat /tmp/tmp.C305e57ORx + rm /tmp/tmp.RwPZDZ9fMT /tmp/tmp.C305e57ORx + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.71qO5ZOZTa +++ mktemp ++ local LAST_ERR=/tmp/tmp.jKLSyjI6l6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.71qO5ZOZTa ++ cat /tmp/tmp.jKLSyjI6l6 ++ rm /tmp/tmp.71qO5ZOZTa /tmp/tmp.jKLSyjI6l6 ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.79Xg8rSR9x ++ mktemp + local LAST_ERR=/tmp/tmp.VWPxxsAJ4N + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-1.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.79Xg8rSR9x + 
cat /tmp/tmp.VWPxxsAJ4N + rm /tmp/tmp.79Xg8rSR9x /tmp/tmp.VWPxxsAJ4N + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + compare_mongo_cmd find myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local command=find + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongo 'use myApp\n db.test.find()' myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 mongodb '' + egrep -v 'I NETWORK|W NETWORK|F NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8dEnKhkHgn +++ mktemp ++ local LAST_ERR=/tmp/tmp.uNETfYHF4K ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8dEnKhkHgn ++ cat /tmp/tmp.uNETfYHF4K ++ rm /tmp/tmp.8dEnKhkHgn /tmp/tmp.uNETfYHF4K ++ return 0 + local client_container=psmdb-client-7469665986-j42fr + local mongo_flag= + [[ myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.AsEVKKm2Yg ++ mktemp + local LAST_ERR=/tmp/tmp.m0IQaEjGC4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-j42fr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-rs0-2.some-name-rs0.scheduled-backup-2969.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AsEVKKm2Yg + cat /tmp/tmp.m0IQaEjGC4 + rm /tmp/tmp.AsEVKKm2Yg /tmp/tmp.m0IQaEjGC4 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/e2e-tests/scheduled-backup/compare/find.json /tmp/tmp.qLgtWfiE09/find + sleep 60 + unlabel_node + desc 'remove labels' + set +o xtrace ----------------------------------------------------------------------------------- remove labels ----------------------------------------------------------------------------------- + kubectl_bin label nodes gke-jen-psmdb-1567-b27e0-default-pool-b4610de5-6drs backupWorker- --overwrite ++ mktemp + local LAST_OUT=/tmp/tmp.x46OKNd5iS ++ mktemp + local LAST_ERR=/tmp/tmp.cpUudigZet + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl label nodes gke-jen-psmdb-1567-b27e0-default-pool-b4610de5-6drs backupWorker- --overwrite + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.x46OKNd5iS node/gke-jen-psmdb-1567-b27e0-default-pool-b4610de5-6drs labeled + cat /tmp/tmp.cpUudigZet + rm 
/tmp/tmp.x46OKNd5iS /tmp/tmp.cpUudigZet + return 0 + destroy scheduled-backup-2969 + local namespace=scheduled-backup-2969 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.MSn8lQQTOZ ++ mktemp + local LAST_ERR=/tmp/tmp.nuURr5US0k + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MSn8lQQTOZ customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.nuURr5US0k + rm /tmp/tmp.MSn8lQQTOZ /tmp/tmp.nuURr5US0k + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n scheduled-backup-2969 cron-some-name-20240708170400-tvtc2 --type=merge -p '{"metadata":{"finalizers":[]}}' perconaservermongodbbackup.psmdb.percona.com/cron-some-name-20240708170400-tvtc2 patched + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n scheduled-backup-2969 cron-some-name-20240708171100-6j6kr --type=merge -p '{"metadata":{"finalizers":[]}}' E0708 17:19:19.175629 28416 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1-12-0: the server could not find the requested resource perconaservermongodbbackup.psmdb.percona.com/cron-some-name-20240708171100-6j6kr patched + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.yRza5jKVhm ++ mktemp + local LAST_ERR=/tmp/tmp.4qYytTdRpv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yRza5jKVhm + cat /tmp/tmp.4qYytTdRpv + rm /tmp/tmp.yRza5jKVhm /tmp/tmp.4qYytTdRpv + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch 
perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.uIYW3i0EOA ++ mktemp + local LAST_ERR=/tmp/tmp.aAoEGecL10 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.uIYW3i0EOA + cat /tmp/tmp.aAoEGecL10 + rm /tmp/tmp.uIYW3i0EOA /tmp/tmp.aAoEGecL10 + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.O69PUwuWcT ++ mktemp + local LAST_ERR=/tmp/tmp.K6G6PVSIJK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.O69PUwuWcT + cat /tmp/tmp.K6G6PVSIJK + rm /tmp/tmp.O69PUwuWcT /tmp/tmp.K6G6PVSIJK + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.YidGcTenax ++ mktemp + local LAST_ERR=/tmp/tmp.MUhktE70Gp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1567/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YidGcTenax clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.MUhktE70Gp + rm /tmp/tmp.YidGcTenax /tmp/tmp.MUhktE70Gp + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.WwudGAaGxb ++ mktemp + local LAST_ERR=/tmp/tmp.NLWQ8sXUbt + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.WwudGAaGxb + cat /tmp/tmp.NLWQ8sXUbt Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": namespaces "cert-manager" not found Error from server 
+ destroy_cert_manager
+ kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.WwudGAaGxb
++ mktemp
+ local LAST_ERR=/tmp/tmp.NLWQ8sXUbt
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 -a -n 1 ']'
+ cat /tmp/tmp.WwudGAaGxb
+ cat /tmp/tmp.NLWQ8sXUbt
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": namespaces "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": serviceaccounts "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": services "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": services "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": deployments.apps "cert-manager" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found
Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 -a -n 1 ']'
+ cat /tmp/tmp.WwudGAaGxb
+ cat /tmp/tmp.NLWQ8sXUbt
(… output identical to the first attempt: the same 46 NotFound errors …)
+ sleep 4
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 -a -n 1 ']'
+ cat /tmp/tmp.WwudGAaGxb
+ cat /tmp/tmp.NLWQ8sXUbt
(… output identical to the first attempt: the same 46 NotFound errors …)
"https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 8 + cat /tmp/tmp.WwudGAaGxb + cat /tmp/tmp.NLWQ8sXUbt Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": 
customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io 
"cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.14.5/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" 
+ rm /tmp/tmp.WwudGAaGxb /tmp/tmp.NLWQ8sXUbt
+ return 1
+ true
+ '[' -n '' ']'
+ '[' -n psmdb-operator ']'
+ rm -rf /tmp/tmp.qLgtWfiE09
+ kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator
+ kubectl_bin delete --grace-period=0 --force=true namespace scheduled-backup-2969
++ mktemp
++ mktemp
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.n525w3CdPr
+ local LAST_OUT=/tmp/tmp.S7mK93yCGK
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.J3CMLaMuhz
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.wlN7ltCjQo
+ local timeout=4
+ local exit_status=0
+ local timeout=4
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace psmdb-operator
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace scheduled-backup-2969
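The teardown force-deletes the operator and test namespaces concurrently, which is why the two kubectl_bin traces interleave above (and why the 'test passed' banner prints before the deletions finish). A minimal equivalent of that cleanup, assuming the namespace names from this run:

# Delete both namespaces in the background, then wait for both to finish.
for ns in psmdb-operator scheduled-backup-2969; do
    kubectl delete namespace "$ns" --grace-period=0 --force=true &
done
wait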