Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/logs/version-service.log grep: warning: stray \ before - Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + create_infra version-service-22351 + local ns=version-service-22351 + [[ 1 == 1 ]] + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.bbQQOlpebC ++ mktemp + local LAST_ERR=/tmp/tmp.Il5nBpNKdd + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bbQQOlpebC customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.Il5nBpNKdd + rm /tmp/tmp.bbQQOlpebC /tmp/tmp.Il5nBpNKdd + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.N7fd8ps8ht ++ mktemp + local LAST_ERR=/tmp/tmp.6kQyJAhy3y + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.N7fd8ps8ht + cat /tmp/tmp.6kQyJAhy3y + rm /tmp/tmp.N7fd8ps8ht /tmp/tmp.6kQyJAhy3y + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p 
'{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.YGEdmmCnFB ++ mktemp + local LAST_ERR=/tmp/tmp.F6QAQlbEXT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YGEdmmCnFB + cat /tmp/tmp.F6QAQlbEXT + rm /tmp/tmp.YGEdmmCnFB /tmp/tmp.F6QAQlbEXT + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' No resources found + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: resource(s) were provided, but no name was specified + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.dZqV90TgyG ++ mktemp + local LAST_ERR=/tmp/tmp.D21g6SqG6O + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dZqV90TgyG customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com condition met + cat /tmp/tmp.D21g6SqG6O + rm /tmp/tmp.dZqV90TgyG /tmp/tmp.D21g6SqG6O + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.vQITB1jIQ4 ++ mktemp + local LAST_ERR=/tmp/tmp.3NzyCzbJAf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vQITB1jIQ4 clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.3NzyCzbJAf + rm /tmp/tmp.vQITB1jIQ4 /tmp/tmp.3NzyCzbJAf + return 0 + check_crd_for_deletion PR-2044-c13d0759 + local git_tag=PR-2044-c13d0759 ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-2044-c13d0759/deploy/crd.yaml ++ yq eval .metadata.name ++ /usr/sbin/sed s/---//g ++ /usr/sbin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in $(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '.metadata.name' | $sed 's/---//g' | $sed ':a;N;$!ba;s/\n/ /g') ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fcmHpohcxx +++ mktemp ++ local LAST_ERR=/tmp/tmp.1wiKFkDOU1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.fcmHpohcxx ++ cat /tmp/tmp.1wiKFkDOU1 Error from server (NotFound): 
customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.fcmHpohcxx ++ cat /tmp/tmp.1wiKFkDOU1 Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.fcmHpohcxx ++ cat /tmp/tmp.1wiKFkDOU1 Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.fcmHpohcxx ++ cat /tmp/tmp.1wiKFkDOU1 Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.fcmHpohcxx /tmp/tmp.1wiKFkDOU1 ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl get clusterrolebinding ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces psmdb-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found ++ mktemp ++ mktemp egrep: warning: egrep is obsolescent; using grep -E + local 
LAST_OUT=/tmp/tmp.M8lP0UZy7s ++ mktemp + local LAST_OUT=/tmp/tmp.Mpbofi4xMe + local LAST_ERR=/tmp/tmp.OEaa7ma3cN ++ mktemp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + local LAST_ERR=/tmp/tmp.d9gDJtVuRM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.M8lP0UZy7s + cat /tmp/tmp.OEaa7ma3cN + rm /tmp/tmp.M8lP0UZy7s /tmp/tmp.OEaa7ma3cN + return 0 namespace "version-service-24525" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Mpbofi4xMe namespace "psmdb-operator" deleted + cat /tmp/tmp.d9gDJtVuRM + rm /tmp/tmp.Mpbofi4xMe /tmp/tmp.d9gDJtVuRM + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.DNpEtOf5Fm ++ mktemp + local LAST_ERR=/tmp/tmp.PLopsrCnGy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DNpEtOf5Fm + cat /tmp/tmp.PLopsrCnGy + rm /tmp/tmp.DNpEtOf5Fm /tmp/tmp.PLopsrCnGy + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.yjTlC1xN1I ++ mktemp + local LAST_ERR=/tmp/tmp.7Raude2sVR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yjTlC1xN1I namespace/psmdb-operator created + cat /tmp/tmp.7Raude2sVR + rm /tmp/tmp.yjTlC1xN1I /tmp/tmp.7Raude2sVR + return 0 + set_kube_ctx psmdb-operator + local namespace=psmdb-operator ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.tIR7p6pz1Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.ozUtdFkcSN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.tIR7p6pz1Y ++ cat /tmp/tmp.ozUtdFkcSN ++ rm /tmp/tmp.tIR7p6pz1Y /tmp/tmp.ozUtdFkcSN ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2044-c13d0759-8-cluster8 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.wduPkF00I6 ++ mktemp + local LAST_ERR=/tmp/tmp.f9Y3lDADHP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2044-c13d0759-8-cluster8 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wduPkF00I6 Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2044-c13d0759-8-cluster8" modified. 
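Nearly every kubectl invocation in this log goes through the kubectl_bin wrapper, which buffers stdout and stderr into mktemp files and retries failed calls. A minimal sketch of the pattern, reconstructed from this trace (the real helper lives in the e2e-tests suite and may differ in detail; the 0/4/8-second backoff matches the sleep 0 / sleep 4 / sleep 8 visible in the crd/null retries above):

kubectl_bin() {
	local LAST_OUT LAST_ERR exit_status=0 timeout=4
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	for i in $(seq 0 2); do
		set +e
		kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
		exit_status=$?
		set -e
		[ "$exit_status" -eq 0 ] && break
		sleep $((i * timeout)) # 0s, then 4s, then 8s between attempts
	done
	cat "$LAST_OUT"
	cat "$LAST_ERR" >&2
	rm -f "$LAST_OUT" "$LAST_ERR"
	return $exit_status
}

This is why every command in the log is bracketed by a pair of mktemp calls and two cat lines: on success the buffered output is replayed once, and on failure the same command is re-run up to three times before the non-zero status propagates.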
+ cat /tmp/tmp.f9Y3lDADHP + rm /tmp/tmp.wduPkF00I6 /tmp/tmp.f9Y3lDADHP + return 0 + deploy_operator + desc 'start PSMDB operator: perconalab/percona-server-mongodb-operator:PR-2044-c13d0759' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator: perconalab/percona-server-mongodb-operator:PR-2044-c13d0759 ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.JWcAg9zQWL ++ mktemp + local LAST_ERR=/tmp/tmp.voKmQv3hOy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.JWcAg9zQWL customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.voKmQv3hOy + rm /tmp/tmp.JWcAg9zQWL /tmp/tmp.voKmQv3hOy + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/cw-rbac.yaml + kubectl_bin apply -n psmdb-operator -f - + sed -e 's^namespace: .*^namespace: psmdb-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.Lx8JBWJFNG ++ mktemp + local LAST_ERR=/tmp/tmp.PjVlIDNKG7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Lx8JBWJFNG clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.PjVlIDNKG7 + rm /tmp/tmp.Lx8JBWJFNG /tmp/tmp.PjVlIDNKG7 + return 0 + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-2044-c13d0759") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/cw-operator.yaml + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.ZgcKiisCne ++ mktemp + local LAST_ERR=/tmp/tmp.g9OKGphJAx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ZgcKiisCne deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.g9OKGphJAx + rm /tmp/tmp.ZgcKiisCne /tmp/tmp.g9OKGphJAx + return 0 + sleep 20 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ai6YikUBip +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xoex3mrjNG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Ai6YikUBip ++ cat /tmp/tmp.Xoex3mrjNG ++ rm /tmp/tmp.Ai6YikUBip /tmp/tmp.Xoex3mrjNG ++ return 0 + wait_operator_pod percona-server-mongodb-operator-78bc54d4dc-kv2rx + local pod=percona-server-mongodb-operator-78bc54d4dc-kv2rx + set +o xtrace waiting for pod/percona-server-mongodb-operator-78bc54d4dc-kv2rx to be ready.OK + echo 'Print operator info from log' Print operator info from log + grep 'Manager starting up' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.27DAhfI8dm +++ mktemp ++ local LAST_ERR=/tmp/tmp.MzcyGtRDub ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.27DAhfI8dm ++ cat /tmp/tmp.MzcyGtRDub ++ rm /tmp/tmp.27DAhfI8dm /tmp/tmp.MzcyGtRDub ++ return 0 + kubectl_bin logs -n psmdb-operator percona-server-mongodb-operator-78bc54d4dc-kv2rx ++ mktemp + local LAST_OUT=/tmp/tmp.L4Ia1i6MyI ++ mktemp + local LAST_ERR=/tmp/tmp.vjSDgbt0jI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs -n psmdb-operator percona-server-mongodb-operator-78bc54d4dc-kv2rx + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.L4Ia1i6MyI + cat /tmp/tmp.vjSDgbt0jI + rm /tmp/tmp.L4Ia1i6MyI /tmp/tmp.vjSDgbt0jI + return 0 2025-12-10T15:02:32.337Z INFO setup Manager starting up {"gitCommit": "c13d075904d489b8e67ff8b025b492122b85931b", "gitBranch": "PR-2044-c13d0759", "buildTime": "", "goVersion": "go1.25.5", "os": "linux", "arch": "amd64"} + create_namespace version-service-22351 + local namespace=version-service-22351 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk 
'{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces version-service-22351' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces version-service-22351 ----------------------------------------------------------------------------------- + xargs kubectl delete ns + kubectl_bin delete namespace version-service-22351 --ignore-not-found ++ mktemp ++ mktemp egrep: warning: egrep is obsolescent; using grep -E + local LAST_OUT=/tmp/tmp.jglA6wUnAT + local LAST_OUT=/tmp/tmp.pc3N8icg74 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.jbg7xfGkgr + local exit_status=0 + local timeout=4 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.aRx9VpIl6r + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + for i in $(seq 0 2) + set +e + kubectl delete namespace version-service-22351 --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.pc3N8icg74 + cat /tmp/tmp.aRx9VpIl6r + rm /tmp/tmp.pc3N8icg74 /tmp/tmp.aRx9VpIl6r + return 0 error: resource(s) were provided, but no name was specified + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jglA6wUnAT + cat /tmp/tmp.jbg7xfGkgr + rm /tmp/tmp.jglA6wUnAT /tmp/tmp.jbg7xfGkgr + return 0 + kubectl_bin wait --for=delete namespace version-service-22351 ++ mktemp + local LAST_OUT=/tmp/tmp.pQ5zvZx1DS ++ mktemp + local LAST_ERR=/tmp/tmp.vLDahcY7om + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace version-service-22351 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.pQ5zvZx1DS + cat /tmp/tmp.vLDahcY7om + rm /tmp/tmp.pQ5zvZx1DS /tmp/tmp.vLDahcY7om + return 0 + desc 'create namespace version-service-22351' + set +o xtrace ----------------------------------------------------------------------------------- create namespace version-service-22351 ----------------------------------------------------------------------------------- + kubectl_bin create 
namespace version-service-22351 ++ mktemp + local LAST_OUT=/tmp/tmp.q3b3hVRESM ++ mktemp + local LAST_ERR=/tmp/tmp.qqVmq2pewh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace version-service-22351 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.q3b3hVRESM namespace/version-service-22351 created + cat /tmp/tmp.qqVmq2pewh + rm /tmp/tmp.q3b3hVRESM /tmp/tmp.qqVmq2pewh + return 0 + set_kube_ctx version-service-22351 + local namespace=version-service-22351 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.8HJ48nxwba +++ mktemp ++ local LAST_ERR=/tmp/tmp.wJxyzeGlKE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.8HJ48nxwba ++ cat /tmp/tmp.wJxyzeGlKE ++ rm /tmp/tmp.8HJ48nxwba /tmp/tmp.wJxyzeGlKE ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2044-c13d0759-8-cluster8 --namespace=version-service-22351 ++ mktemp + local LAST_OUT=/tmp/tmp.AgUFLwFIvK ++ mktemp + local LAST_ERR=/tmp/tmp.BBUAb516ol + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2044-c13d0759-8-cluster8 --namespace=version-service-22351 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AgUFLwFIvK Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2044-c13d0759-8-cluster8" modified. + cat /tmp/tmp.BBUAb516ol + rm /tmp/tmp.AgUFLwFIvK /tmp/tmp.BBUAb516ol + return 0 + desc 'install version service' + set +o xtrace ----------------------------------------------------------------------------------- install version service ----------------------------------------------------------------------------------- + kubectl_bin create configmap -n psmdb-operator versions --from-file /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/operator.9.9.9.psmdb-operator.dep.json --from-file /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/operator.9.9.9.psmdb-operator.json ++ mktemp + local LAST_OUT=/tmp/tmp.81T29mml26 ++ mktemp + local LAST_ERR=/tmp/tmp.RgVmqYTk1F + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create configmap -n psmdb-operator versions --from-file /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/operator.9.9.9.psmdb-operator.dep.json --from-file /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/operator.9.9.9.psmdb-operator.json + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.81T29mml26 configmap/versions created + cat /tmp/tmp.RgVmqYTk1F + rm /tmp/tmp.81T29mml26 /tmp/tmp.RgVmqYTk1F + return 0 + kubectl_bin apply -n psmdb-operator -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/vs.yml ++ mktemp + local LAST_OUT=/tmp/tmp.QPO60BIN4N ++ mktemp + local LAST_ERR=/tmp/tmp.PHVAFqLdz2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/vs.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.QPO60BIN4N deployment.apps/version-service created service/version-service created + cat /tmp/tmp.PHVAFqLdz2 + rm 
/tmp/tmp.QPO60BIN4N /tmp/tmp.PHVAFqLdz2 + return 0 + sleep 10 + yq eval '(.. | select(tag == "!!str")) |= sub("version-service$", "version-service-cr")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/vs.yml + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.PldKEA1mdt ++ mktemp + local LAST_ERR=/tmp/tmp.G5zbvwKb9A + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PldKEA1mdt deployment.apps/version-service-cr created service/version-service-cr created + cat /tmp/tmp.G5zbvwKb9A + rm /tmp/tmp.PldKEA1mdt /tmp/tmp.G5zbvwKb9A + return 0 + kubectl_bin -n psmdb-operator set env deploy/percona-server-mongodb-operator PERCONA_VS_FALLBACK_URI=http://version-service:11000 ++ mktemp + local LAST_OUT=/tmp/tmp.WlsBmvcNGr ++ mktemp + local LAST_ERR=/tmp/tmp.BbfE9oa1Ix + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl -n psmdb-operator set env deploy/percona-server-mongodb-operator PERCONA_VS_FALLBACK_URI=http://version-service:11000 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.WlsBmvcNGr deployment.apps/percona-server-mongodb-operator env updated + cat /tmp/tmp.BbfE9oa1Ix + rm /tmp/tmp.WlsBmvcNGr /tmp/tmp.BbfE9oa1Ix + return 0 + sleep 30 + desc 'enable telemetry on operator level' + set +o xtrace ----------------------------------------------------------------------------------- enable telemetry on operator level ----------------------------------------------------------------------------------- + kubectl_bin get deployment/percona-server-mongodb-operator -o yaml -n psmdb-operator + yq eval '(.spec.template.spec.containers[0].env[] | select(.name == "DISABLE_TELEMETRY").value) = "false"' + kubectl_bin apply -n psmdb-operator -f - ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.1nE23Zs71z + local LAST_OUT=/tmp/tmp.xuLMBVEHB2 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.9CwptMXBIE + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.bQilOxz5Bp + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + for i in $(seq 0 2) + set +e + kubectl get deployment/percona-server-mongodb-operator -o yaml -n psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.1nE23Zs71z + cat /tmp/tmp.bQilOxz5Bp + rm /tmp/tmp.1nE23Zs71z /tmp/tmp.bQilOxz5Bp + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xuLMBVEHB2 deployment.apps/percona-server-mongodb-operator configured + cat /tmp/tmp.9CwptMXBIE + rm /tmp/tmp.xuLMBVEHB2 /tmp/tmp.9CwptMXBIE + return 0 + wait_deployment percona-server-mongodb-operator + local name=percona-server-mongodb-operator + sleep 10 + retry=0 + echo -n percona-server-mongodb-operator percona-server-mongodb-operator+ kubectl_bin get deployment percona-server-mongodb-operator -n psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.pN9e1TbmiW ++ mktemp + local LAST_ERR=/tmp/tmp.tMBDfaHzuf + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get deployment percona-server-mongodb-operator -n psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.pN9e1TbmiW + cat /tmp/tmp.tMBDfaHzuf + rm /tmp/tmp.pN9e1TbmiW /tmp/tmp.tMBDfaHzuf + return 0 ++ kubectl_bin get deployment percona-server-mongodb-operator -o 
'jsonpath={.status.replicas}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.fhTsc6zZBF +++ mktemp ++ local LAST_ERR=/tmp/tmp.kGVRSDjT36 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get deployment percona-server-mongodb-operator -o 'jsonpath={.status.replicas}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fhTsc6zZBF ++ cat /tmp/tmp.kGVRSDjT36 ++ rm /tmp/tmp.fhTsc6zZBF /tmp/tmp.kGVRSDjT36 ++ return 0 ++ kubectl_bin get deployment percona-server-mongodb-operator -o 'jsonpath={.status.readyReplicas}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.aG55pOLNpi +++ mktemp ++ local LAST_ERR=/tmp/tmp.ON2fjuT1Wh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get deployment percona-server-mongodb-operator -o 'jsonpath={.status.readyReplicas}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.aG55pOLNpi ++ cat /tmp/tmp.ON2fjuT1Wh ++ rm /tmp/tmp.aG55pOLNpi /tmp/tmp.ON2fjuT1Wh ++ return 0 + '[' 1 == 1 ']' + check_telemetry_transfer http://version-service-cr:11000 disabled enabled + local cr_vs_uri=http://version-service-cr:11000 + local cr_vs_channel=disabled + local telemetry_state=enabled + cluster=minimal-cluster + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.DDzXiTd6FI ++ mktemp + local LAST_ERR=/tmp/tmp.32eUHQgeI2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DDzXiTd6FI deployment.apps/psmdb-client created + cat /tmp/tmp.32eUHQgeI2 + rm /tmp/tmp.DDzXiTd6FI /tmp/tmp.32eUHQgeI2 + return 0 + yq eval '.metadata.name = "minimal-cluster"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/secrets.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.REh22HFZ4D ++ mktemp + local LAST_ERR=/tmp/tmp.Vv3C5JXcuX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.REh22HFZ4D secret/minimal-cluster created + cat /tmp/tmp.Vv3C5JXcuX + rm /tmp/tmp.REh22HFZ4D /tmp/tmp.Vv3C5JXcuX + return 0 + desc 'create PSMDB minimal cluster minimal-cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PSMDB minimal cluster minimal-cluster ----------------------------------------------------------------------------------- + yq eval ' .spec.upgradeOptions.versionServiceEndpoint = "http://version-service-cr:11000" | .spec.upgradeOptions.apply = "disabled" | .spec.initImage = "perconalab/percona-server-mongodb-operator:PR-2044-c13d0759" | .spec.crVersion = "9.9.9" | .spec.image = "perconalab/percona-server-mongodb-operator:main-mongod8.0" | .spec.pmm.image = "percona/pmm-client:2.44.1-1" | .spec.backup.enabled = false | .spec.backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/cr-minimal.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.6IPcq37yC5 ++ mktemp + local LAST_ERR=/tmp/tmp.EN2spbesNe + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6IPcq37yC5 perconaservermongodb.psmdb.percona.com/minimal-cluster created + cat /tmp/tmp.EN2spbesNe + rm /tmp/tmp.6IPcq37yC5 /tmp/tmp.EN2spbesNe + return 0 + desc 'check if Pod is started' + set +o xtrace ----------------------------------------------------------------------------------- check if Pod is started ----------------------------------------------------------------------------------- + wait_for_running minimal-cluster-rs0 1 + local name=minimal-cluster-rs0 + let last_pod=0 + : + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=minimal-cluster ++ seq 0 0 + for i in $(seq 0 $last_pod) + [[ 0 -eq 0 ]] ++ kubectl_bin get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FkOxrKI8FC +++ mktemp ++ local LAST_ERR=/tmp/tmp.CYkrnFJUwT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FkOxrKI8FC ++ cat /tmp/tmp.CYkrnFJUwT ++ rm /tmp/tmp.FkOxrKI8FC /tmp/tmp.CYkrnFJUwT ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod minimal-cluster-rs0-0 + local pod=minimal-cluster-rs0-0 + set +o xtrace waiting for pod/minimal-cluster-rs0-0 to be ready........................OK ++ kubectl_bin get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hHO1hZ4za5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.2E2OCmziNv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.hHO1hZ4za5 ++ cat /tmp/tmp.2E2OCmziNv ++ rm /tmp/tmp.hHO1hZ4za5 /tmp/tmp.2E2OCmziNv ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dKaSG1CvTb +++ mktemp ++ local LAST_ERR=/tmp/tmp.MnTc46dYP7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dKaSG1CvTb ++ cat /tmp/tmp.MnTc46dYP7 ++ rm /tmp/tmp.dKaSG1CvTb /tmp/tmp.MnTc46dYP7 ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness............................ 
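The minimal cluster above is created by rewriting deploy/cr-minimal.yaml in-flight with yq rather than keeping a per-test manifest. The essential part of the pattern, trimmed from the full expression in the trace (image tags are the ones this PR build uses):

yq eval '
	.spec.upgradeOptions.versionServiceEndpoint = "http://version-service-cr:11000" |
	.spec.upgradeOptions.apply = "disabled" |
	.spec.crVersion = "9.9.9" |
	.spec.image = "perconalab/percona-server-mongodb-operator:main-mongod8.0" |
	.spec.backup.enabled = false' deploy/cr-minimal.yaml \
	| kubectl apply -f -

Pointing upgradeOptions.versionServiceEndpoint at the in-cluster version-service-cr Service (deployed earlier from vs.yml) is what lets the test observe which endpoint the CR-level configuration hits, independently of the operator-level PERCONA_VS_FALLBACK_URI override set above.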
+ sleep 20 + desc 'create user myApp' + set +o xtrace ----------------------------------------------------------------------------------- create user myApp ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' userAdmin:userAdmin123456@minimal-cluster-rs0.version-service-22351 + local 'command=db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' + local uri=userAdmin:userAdmin123456@minimal-cluster-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gWTaV61Zg5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ifODmFYGVa ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gWTaV61Zg5 ++ cat /tmp/tmp.ifODmFYGVa ++ rm /tmp/tmp.gWTaV61Zg5 /tmp/tmp.ifODmFYGVa ++ return 0 + local client_container=psmdb-client-696897d69b-lg66j + local mongo_flag= + [[ userAdmin:userAdmin123456@minimal-cluster-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-lg66j -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@minimal-cluster-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.YfErYHjisZ ++ mktemp + local LAST_ERR=/tmp/tmp.vgKzr88Wqe + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-lg66j -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@minimal-cluster-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YfErYHjisZ Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://minimal-cluster-rs0-0.minimal-cluster-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("d6fa3694-b02f-49ac-85e9-00c79e0c2c43") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.vgKzr88Wqe + rm /tmp/tmp.YfErYHjisZ /tmp/tmp.vgKzr88Wqe + return 0 + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@minimal-cluster-rs0.version-service-22351 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@minimal-cluster-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.0q1JIM2UGg +++ mktemp ++ local LAST_ERR=/tmp/tmp.fyoKBt5EXq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0q1JIM2UGg ++ cat /tmp/tmp.fyoKBt5EXq ++ rm /tmp/tmp.0q1JIM2UGg /tmp/tmp.fyoKBt5EXq ++ return 0 + local client_container=psmdb-client-696897d69b-lg66j + local mongo_flag= + [[ myApp:myPass@minimal-cluster-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-lg66j -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@minimal-cluster-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.wr7w4L4iFI ++ mktemp + local LAST_ERR=/tmp/tmp.j2FMgYeLAL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-lg66j -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@minimal-cluster-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wr7w4L4iFI Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://minimal-cluster-rs0-0.minimal-cluster-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("0be2c74f-0c79-4294-9342-ffd656f6d2fc") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.j2FMgYeLAL + rm /tmp/tmp.wr7w4L4iFI /tmp/tmp.j2FMgYeLAL + return 0 + desc 'check telemetry' + set +o xtrace ----------------------------------------------------------------------------------- check telemetry ----------------------------------------------------------------------------------- + grep -E 'server request payload|unary call' + grep -Eo '\{.*\}' + jq 'del(."grpc.request.content".msg.customResourceUid)' + jq 'del(."grpc.request.content".msg.kubeVersion)' + jq 'del(."grpc.start_time")' + jq 'del(."grpc.time_ms")' ++ kubectl get pods --selector=run=version-service-cr -o 'jsonpath={.items[0].metadata.name}' -n psmdb-operator + kubectl_bin logs version-service-cr-6fcc64c58f-vnkxl -n psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.q5gbhg03p0 ++ mktemp + local LAST_ERR=/tmp/tmp.qYcQuMKJ48 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs version-service-cr-6fcc64c58f-vnkxl -n psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.q5gbhg03p0 + cat /tmp/tmp.qYcQuMKJ48 + rm /tmp/tmp.q5gbhg03p0 /tmp/tmp.qYcQuMKJ48 + return 0 + grep -E 'server request payload|unary call' + grep -Eo '\{.*\}' + jq 'del(."grpc.request.content".msg.customResourceUid)' ++ kubectl get pods --selector=run=version-service -o 'jsonpath={.items[0].metadata.name}' -n psmdb-operator + jq 'del(."grpc.request.content".msg.kubeVersion)' + jq 'del(."grpc.start_time")' + jq 'del(."grpc.time_ms")' + kubectl_bin logs version-service-65bc6b945c-g6gzt -n psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.rNM5d3LXXY ++ mktemp + local LAST_ERR=/tmp/tmp.pQtbCJla3I + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) 
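The telemetry check works by scraping the mock version-service pods' logs and normalizing away the fields that differ between runs before diffing against the golden files in e2e-tests/version-service/compare. The pipeline, as run in the trace above:

pod=$(kubectl get pods --selector=run=version-service-cr \
	-o 'jsonpath={.items[0].metadata.name}' -n psmdb-operator)
kubectl logs "$pod" -n psmdb-operator \
	| grep -E 'server request payload|unary call' \
	| grep -Eo '\{.*\}' \
	| jq 'del(."grpc.request.content".msg.customResourceUid)' \
	| jq 'del(."grpc.request.content".msg.kubeVersion)' \
	| jq 'del(."grpc.start_time")' \
	| jq 'del(."grpc.time_ms")'

customResourceUid, kubeVersion, and the gRPC timing fields are unique to each run, so they are deleted before comparison; what remains (operator version, channel, apply value, and so on) should match the expected log line for line.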
+ set +e + kubectl logs version-service-65bc6b945c-g6gzt -n psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rNM5d3LXXY + cat /tmp/tmp.pQtbCJla3I + rm /tmp/tmp.rNM5d3LXXY /tmp/tmp.pQtbCJla3I + return 0 + local telemetry_log_file=enabled_telemetry.version-service.log-cw.json + desc 'telemetry was disabled in CR but in operator not' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in CR but in operator not ----------------------------------------------------------------------------------- + '[' disabled == disabled -a enabled == enabled ']' + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/enabled_telemetry.version-service.log-cw.json /dev/fd/63 ++ grep -f /tmp/tmp.QfOIVStdwq/enabled_telemetry.version-service.log.json /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/enabled_telemetry.version-service.log-cw.json + [[ -s /tmp/tmp.QfOIVStdwq/enabled_telemetry.version-service-cr.log.json ]] + local telemetry_cr_log_file=enabled_telemetry.version-service-cr.log-cw.json + local image_prefix=disabled + desc 'telemetry was disabled in operator but not in CR' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in operator but not in CR ----------------------------------------------------------------------------------- + '[' disabled == disabled-recommended -a enabled == disabled ']' + desc 'telemetry was disabled in CR as well as in operator' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in CR as well as in operator ----------------------------------------------------------------------------------- + '[' disabled == disabled -a enabled == disabled ']' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.zbDC6GhTZD +++ mktemp ++ local LAST_ERR=/tmp/tmp.CSVGfQdZXo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.zbDC6GhTZD ++ cat /tmp/tmp.CSVGfQdZXo ++ rm /tmp/tmp.zbDC6GhTZD /tmp/tmp.CSVGfQdZXo ++ return 0 + kubectl_bin delete pod -n psmdb-operator percona-server-mongodb-operator-557ddd99bf-ngjmc ++ mktemp + local LAST_OUT=/tmp/tmp.Z3Qmgh3txa ++ mktemp + local LAST_ERR=/tmp/tmp.poL5MvlfQZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pod -n psmdb-operator percona-server-mongodb-operator-557ddd99bf-ngjmc + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Z3Qmgh3txa pod "percona-server-mongodb-operator-557ddd99bf-ngjmc" deleted from psmdb-operator namespace + cat /tmp/tmp.poL5MvlfQZ + rm /tmp/tmp.Z3Qmgh3txa /tmp/tmp.poL5MvlfQZ + return 0 + kubectl_bin patch psmdb minimal-cluster --type=merge -p '{"metadata":{"finalizers":["percona.com/delete-psmdb-pvc"]}}' ++ mktemp + local LAST_OUT=/tmp/tmp.a8QR6vYVIr ++ mktemp + local LAST_ERR=/tmp/tmp.sSrgGKBt8e + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb minimal-cluster --type=merge -p 
'{"metadata":{"finalizers":["percona.com/delete-psmdb-pvc"]}}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.a8QR6vYVIr perconaservermongodb.psmdb.percona.com/minimal-cluster patched + cat /tmp/tmp.sSrgGKBt8e + rm /tmp/tmp.a8QR6vYVIr /tmp/tmp.sSrgGKBt8e + return 0 + kubectl_bin delete psmdb --all ++ mktemp + local LAST_OUT=/tmp/tmp.Zb3abKBGTM ++ mktemp + local LAST_ERR=/tmp/tmp.UT3j9nuHkL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Zb3abKBGTM perconaservermongodb.psmdb.percona.com "minimal-cluster" deleted from version-service-22351 namespace + cat /tmp/tmp.UT3j9nuHkL + rm /tmp/tmp.Zb3abKBGTM /tmp/tmp.UT3j9nuHkL + return 0 + kubectl_bin delete deploy psmdb-client ++ mktemp + local LAST_OUT=/tmp/tmp.AHmXKCxy3I ++ mktemp + local LAST_ERR=/tmp/tmp.iqsPGUXJAI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete deploy psmdb-client + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.AHmXKCxy3I deployment.apps "psmdb-client" deleted from version-service-22351 namespace + cat /tmp/tmp.iqsPGUXJAI + rm /tmp/tmp.AHmXKCxy3I /tmp/tmp.iqsPGUXJAI + return 0 + sleep 30 + desc 'disabling telemetry on the operator level' + set +o xtrace ----------------------------------------------------------------------------------- disabling telemetry on the operator level ----------------------------------------------------------------------------------- + kubectl_bin delete pod -l run=version-service-cr -n psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.kH9TOJfACs ++ mktemp + local LAST_ERR=/tmp/tmp.GwjTKiLF9G + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pod -l run=version-service-cr -n psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kH9TOJfACs pod "version-service-cr-6fcc64c58f-vnkxl" deleted from psmdb-operator namespace + cat /tmp/tmp.GwjTKiLF9G + rm /tmp/tmp.kH9TOJfACs /tmp/tmp.GwjTKiLF9G + return 0 + kubectl_bin delete pod -l run=version-service -n psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.C4mYcpFe6M ++ mktemp + local LAST_ERR=/tmp/tmp.9ENjBnATg5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pod -l run=version-service -n psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.C4mYcpFe6M pod "version-service-65bc6b945c-g6gzt" deleted from psmdb-operator namespace + cat /tmp/tmp.9ENjBnATg5 + rm /tmp/tmp.C4mYcpFe6M /tmp/tmp.9ENjBnATg5 + return 0 + kubectl_bin get deployment/percona-server-mongodb-operator -o yaml -n psmdb-operator + yq eval '(.spec.template.spec.containers[0].env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' + kubectl_bin apply -n psmdb-operator -f - ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.nyAO7oRgmv + local LAST_OUT=/tmp/tmp.EIMllVdk3Y ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.gw5Clz4bUy + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.RNu7ntn4Qr + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get deployment/percona-server-mongodb-operator -o yaml -n psmdb-operator + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nyAO7oRgmv + cat /tmp/tmp.gw5Clz4bUy + rm /tmp/tmp.nyAO7oRgmv 
/tmp/tmp.gw5Clz4bUy + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EIMllVdk3Y deployment.apps/percona-server-mongodb-operator configured + cat /tmp/tmp.RNu7ntn4Qr + rm /tmp/tmp.EIMllVdk3Y /tmp/tmp.RNu7ntn4Qr + return 0 ++ get_mongod_ver_from_image perconalab/percona-server-mongodb-operator:main-mongod8.0 ++ local image=perconalab/percona-server-mongodb-operator:main-mongod8.0 +++ run_simple_cli_inside_image perconalab/percona-server-mongodb-operator:main-mongod8.0 'mongod --version' +++ /usr/sbin/sed -r 's/^.*db version v(([0-9]+\.){2}[0-9]+-[0-9]+).*$/\1/g' +++ local image=perconalab/percona-server-mongodb-operator:main-mongod8.0 +++ local 'cli=mongod --version' +++ local pod_name=4251 +++ kubectl_bin -n default run 4251 --image=perconalab/percona-server-mongodb-operator:main-mongod8.0 --restart=Never --command -- sleep infinity ++++ mktemp +++ local LAST_OUT=/tmp/tmp.M8XHdrorsN ++++ mktemp +++ local LAST_ERR=/tmp/tmp.U2b54eN7e4 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl -n default run 4251 --image=perconalab/percona-server-mongodb-operator:main-mongod8.0 --restart=Never --command -- sleep infinity +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.M8XHdrorsN +++ cat /tmp/tmp.U2b54eN7e4 +++ rm /tmp/tmp.M8XHdrorsN /tmp/tmp.U2b54eN7e4 +++ return 0 +++ kubectl_bin -n default wait --for=condition=Ready pod/4251 ++++ mktemp +++ local LAST_OUT=/tmp/tmp.dJ8nqSsJny ++++ mktemp +++ local LAST_ERR=/tmp/tmp.vTTs5PONR8 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl -n default wait --for=condition=Ready pod/4251 +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.dJ8nqSsJny +++ cat /tmp/tmp.vTTs5PONR8 +++ rm /tmp/tmp.dJ8nqSsJny /tmp/tmp.vTTs5PONR8 +++ return 0 ++++ kubectl_bin -n default exec 4251 -- bash -c 'mongod --version 2>&1' +++++ mktemp ++++ local LAST_OUT=/tmp/tmp.BBF9sGXE0C +++++ mktemp ++++ local LAST_ERR=/tmp/tmp.ymZ3aHApZm ++++ local exit_status=0 ++++ local timeout=4 +++++ seq 0 2 ++++ for i in $(seq 0 2) ++++ set +e ++++ kubectl -n default exec 4251 -- bash -c 'mongod --version 2>&1' ++++ exit_status=0 ++++ set -e ++++ '[' 0 '!=' 0 -a -n 1 ']' ++++ break ++++ cat /tmp/tmp.BBF9sGXE0C ++++ cat /tmp/tmp.ymZ3aHApZm ++++ rm /tmp/tmp.BBF9sGXE0C /tmp/tmp.ymZ3aHApZm ++++ return 0 +++ local 'output=db version v8.0.16-5 Build Info: { "version": "8.0.16-5", "gitVersion": "f174b291665d4de5c104d07bff06eabe7a913868", "openSSLVersion": "OpenSSL 3.5.1 1 Jul 2025", "modules": [], "perconaFeatures": [ "MemoryEngine", "HotBackup", "BackupCursorAggregationStage", "BackupCursorExtendAggregationStage", "AWSIAM", "Kerberos", "LDAP", "OIDC", "TDE", "FIPSMode", "FCBIS", "Auditing", "ProfilingRateLimit", "LogRedaction", "ngram" ], "allocator": "tcmalloc-google", "environment": { "distarch": "x86_64", "target_arch": "x86_64" } }' +++ kubectl_bin -n default delete pod/4251 --grace-period=0 --force ++++ mktemp +++ local LAST_OUT=/tmp/tmp.ghpI3d2Wpv ++++ mktemp +++ local LAST_ERR=/tmp/tmp.jvEmvdJ4vZ +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in $(seq 0 2) +++ set +e +++ kubectl -n default delete pod/4251 --grace-period=0 --force +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.ghpI3d2Wpv +++ cat /tmp/tmp.jvEmvdJ4vZ Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. 
The resource may continue to run on the cluster indefinitely. +++ rm /tmp/tmp.ghpI3d2Wpv /tmp/tmp.jvEmvdJ4vZ +++ return 0 +++ echo db version v8.0.16-5 Build Info: '{' '"version":' '"8.0.16-5",' '"gitVersion":' '"f174b291665d4de5c104d07bff06eabe7a913868",' '"openSSLVersion":' '"OpenSSL' 3.5.1 1 Jul '2025",' '"modules":' '[],' '"perconaFeatures":' '[' '"MemoryEngine",' '"HotBackup",' '"BackupCursorAggregationStage",' '"BackupCursorExtendAggregationStage",' '"AWSIAM",' '"Kerberos",' '"LDAP",' '"OIDC",' '"TDE",' '"FIPSMode",' '"FCBIS",' '"Auditing",' '"ProfilingRateLimit",' '"LogRedaction",' '"ngram"' '],' '"allocator":' '"tcmalloc-google",' '"environment":' '{' '"distarch":' '"x86_64",' '"target_arch":' '"x86_64"' '}' '}' ++ version_info=8.0.16-5 ++ [[ ! 8.0.16-5 =~ ^([0-9]+\.){2}[0-9]+-[0-9]+$ ]] ++ echo 8.0.16-5 + ACTUAL_MONGOD_VERSION=8.0.16-5 + wait_deployment percona-server-mongodb-operator + local name=percona-server-mongodb-operator + sleep 10 + retry=0 + echo -n percona-server-mongodb-operator percona-server-mongodb-operator+ kubectl_bin get deployment percona-server-mongodb-operator -n psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.rFrLRgrZo3 ++ mktemp + local LAST_ERR=/tmp/tmp.ug10BxfzhD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get deployment percona-server-mongodb-operator -n psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rFrLRgrZo3 + cat /tmp/tmp.ug10BxfzhD + rm /tmp/tmp.rFrLRgrZo3 /tmp/tmp.ug10BxfzhD + return 0 ++ kubectl_bin get deployment percona-server-mongodb-operator -o 'jsonpath={.status.replicas}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.6WGaEf59Rb +++ mktemp ++ local LAST_ERR=/tmp/tmp.Osz0nuBwLJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get deployment percona-server-mongodb-operator -o 'jsonpath={.status.replicas}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.6WGaEf59Rb ++ cat /tmp/tmp.Osz0nuBwLJ ++ rm /tmp/tmp.6WGaEf59Rb /tmp/tmp.Osz0nuBwLJ ++ return 0 ++ kubectl_bin get deployment percona-server-mongodb-operator -o 'jsonpath={.status.readyReplicas}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.dxqj1BnnVq +++ mktemp ++ local LAST_ERR=/tmp/tmp.rF7FX4W8Yq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get deployment percona-server-mongodb-operator -o 'jsonpath={.status.readyReplicas}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.dxqj1BnnVq ++ cat /tmp/tmp.rF7FX4W8Yq ++ rm /tmp/tmp.dxqj1BnnVq /tmp/tmp.rF7FX4W8Yq ++ return 0 + '[' 1 == 1 ']' + check_telemetry_transfer http://version-service-cr:11000 8.0-recommended disabled + local cr_vs_uri=http://version-service-cr:11000 + local cr_vs_channel=8.0-recommended + local telemetry_state=disabled + cluster=minimal-cluster + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.fcd2IZ8kiT ++ mktemp + local LAST_ERR=/tmp/tmp.gs8iPXgiou + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fcd2IZ8kiT deployment.apps/psmdb-client created + cat /tmp/tmp.gs8iPXgiou + rm /tmp/tmp.fcd2IZ8kiT /tmp/tmp.gs8iPXgiou + return 0 + yq eval '.metadata.name = "minimal-cluster"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/secrets.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.vgVUs1wFz8 ++ mktemp + local LAST_ERR=/tmp/tmp.19PlIWExfo + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vgVUs1wFz8 secret/minimal-cluster configured + cat /tmp/tmp.19PlIWExfo Warning: resource secrets/minimal-cluster is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. + rm /tmp/tmp.vgVUs1wFz8 /tmp/tmp.19PlIWExfo + return 0 + desc 'create PSMDB minimal cluster minimal-cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PSMDB minimal cluster minimal-cluster ----------------------------------------------------------------------------------- + yq eval ' .spec.upgradeOptions.versionServiceEndpoint = "http://version-service-cr:11000" | .spec.upgradeOptions.apply = "8.0-recommended" | .spec.initImage = "perconalab/percona-server-mongodb-operator:PR-2044-c13d0759" | .spec.crVersion = "9.9.9" | .spec.image = "perconalab/percona-server-mongodb-operator:main-mongod8.0" | .spec.pmm.image = "percona/pmm-client:2.44.1-1" | .spec.backup.enabled = false | .spec.backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/cr-minimal.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.flYPxeZKHJ ++ mktemp + local LAST_ERR=/tmp/tmp.TVmQtLnXOj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.flYPxeZKHJ perconaservermongodb.psmdb.percona.com/minimal-cluster created + cat /tmp/tmp.TVmQtLnXOj + rm /tmp/tmp.flYPxeZKHJ /tmp/tmp.TVmQtLnXOj + return 0 + desc 'check if Pod is started' + set +o xtrace ----------------------------------------------------------------------------------- check if Pod is started ----------------------------------------------------------------------------------- + wait_for_running minimal-cluster-rs0 1 + local name=minimal-cluster-rs0 + let last_pod=0 + : + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=minimal-cluster ++ seq 0 0 + for i in $(seq 0 $last_pod) + [[ 0 -eq 0 ]] ++ kubectl_bin get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kcePzWbAvH +++ mktemp ++ local LAST_ERR=/tmp/tmp.sG26RdMOPx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kcePzWbAvH ++ cat /tmp/tmp.sG26RdMOPx ++ rm /tmp/tmp.kcePzWbAvH /tmp/tmp.sG26RdMOPx 
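Note how the cluster spec is never edited on disk: deploy/cr-minimal.yaml is streamed through a yq eval expression that overrides the version-service endpoint, upgrade channel, and image fields, and the result is piped straight into kubectl apply -f -. A condensed sketch of the same pattern with just a subset of the overrides used above:

    # Sketch: override CR fields in-flight with yq v4, then apply.
    # Endpoint, channel, and path are the values this test run uses.
    yq eval '
        .spec.upgradeOptions.versionServiceEndpoint = "http://version-service-cr:11000" |
        .spec.upgradeOptions.apply = "8.0-recommended" |
        .spec.backup.enabled = false
    ' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/cr-minimal.yaml | kubectl apply -f -

Piping keeps the checked-out manifest pristine, so each telemetry case can restart from the same base file with a different upgradeOptions.apply channel.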
++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod minimal-cluster-rs0-0 + local pod=minimal-cluster-rs0-0 + set +o xtrace waiting for pod/minimal-cluster-rs0-0 to be ready.......................OK ++ kubectl_bin get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WEX2QYlBd8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.OL289NOOVs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WEX2QYlBd8 ++ cat /tmp/tmp.OL289NOOVs ++ rm /tmp/tmp.WEX2QYlBd8 /tmp/tmp.OL289NOOVs ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.onOGIlxdaD +++ mktemp ++ local LAST_ERR=/tmp/tmp.sWz9MtpyXz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.onOGIlxdaD ++ cat /tmp/tmp.sWz9MtpyXz ++ rm /tmp/tmp.onOGIlxdaD /tmp/tmp.sWz9MtpyXz ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness............................. + sleep 20 + desc 'create user myApp' + set +o xtrace ----------------------------------------------------------------------------------- create user myApp ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' userAdmin:userAdmin123456@minimal-cluster-rs0.version-service-22351 + local 'command=db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' + local uri=userAdmin:userAdmin123456@minimal-cluster-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PMPaduX3Yh +++ mktemp ++ local LAST_ERR=/tmp/tmp.C1tOE2lKQE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PMPaduX3Yh ++ cat /tmp/tmp.C1tOE2lKQE ++ rm /tmp/tmp.PMPaduX3Yh /tmp/tmp.C1tOE2lKQE ++ return 0 + local client_container=psmdb-client-696897d69b-bbzzn + local mongo_flag= + [[ userAdmin:userAdmin123456@minimal-cluster-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-bbzzn -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@minimal-cluster-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.7vsvFjMgYC ++ mktemp + local LAST_ERR=/tmp/tmp.frtb49j2ke + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-bbzzn -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo 
mongodb+srv://userAdmin:userAdmin123456@minimal-cluster-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7vsvFjMgYC Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://minimal-cluster-rs0-0.minimal-cluster-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("6f58278e-df6b-45a7-ad9f-89b753b1a2b1") } Percona Server for MongoDB server version: v8.0.4-1 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.frtb49j2ke + rm /tmp/tmp.7vsvFjMgYC /tmp/tmp.frtb49j2ke + return 0 + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@minimal-cluster-rs0.version-service-22351 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@minimal-cluster-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7mQCCrDtQt +++ mktemp ++ local LAST_ERR=/tmp/tmp.D2kec4HjCb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7mQCCrDtQt ++ cat /tmp/tmp.D2kec4HjCb ++ rm /tmp/tmp.7mQCCrDtQt /tmp/tmp.D2kec4HjCb ++ return 0 + local client_container=psmdb-client-696897d69b-bbzzn + local mongo_flag= + [[ myApp:myPass@minimal-cluster-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-bbzzn -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@minimal-cluster-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Jj6D456m5h ++ mktemp + local LAST_ERR=/tmp/tmp.tq72ziwv2L + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-bbzzn -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@minimal-cluster-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Jj6D456m5h Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://minimal-cluster-rs0-0.minimal-cluster-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("ee0a1590-59e5-4708-9819-168588c1c994") } Percona Server for MongoDB server version: v8.0.4-1 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.tq72ziwv2L + rm /tmp/tmp.Jj6D456m5h /tmp/tmp.tq72ziwv2L + return 0 + desc 'check telemetry' + set +o xtrace ----------------------------------------------------------------------------------- check telemetry 
----------------------------------------------------------------------------------- + grep -E 'server request payload|unary call' + grep -Eo '\{.*\}' + jq 'del(."grpc.request.content".msg.customResourceUid)' ++ kubectl get pods --selector=run=version-service-cr -o 'jsonpath={.items[0].metadata.name}' -n psmdb-operator + jq 'del(."grpc.request.content".msg.kubeVersion)' + jq 'del(."grpc.start_time")' + jq 'del(."grpc.time_ms")' + kubectl_bin logs version-service-cr-6fcc64c58f-29zcf -n psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.bPxfznFUQP ++ mktemp + local LAST_ERR=/tmp/tmp.udDP78b4Db + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs version-service-cr-6fcc64c58f-29zcf -n psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.bPxfznFUQP + cat /tmp/tmp.udDP78b4Db + rm /tmp/tmp.bPxfznFUQP /tmp/tmp.udDP78b4Db + return 0 + grep -E 'server request payload|unary call' ++ kubectl get pods --selector=run=version-service -o 'jsonpath={.items[0].metadata.name}' -n psmdb-operator + grep -Eo '\{.*\}' + jq 'del(."grpc.request.content".msg.customResourceUid)' + jq 'del(."grpc.request.content".msg.kubeVersion)' + jq 'del(."grpc.start_time")' + jq 'del(."grpc.time_ms")' + kubectl_bin logs version-service-65bc6b945c-kcwjw -n psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.zmXHE2xGuz ++ mktemp + local LAST_ERR=/tmp/tmp.Cziga2030m + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs version-service-65bc6b945c-kcwjw -n psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zmXHE2xGuz + cat /tmp/tmp.Cziga2030m + rm /tmp/tmp.zmXHE2xGuz /tmp/tmp.Cziga2030m + return 0 + local telemetry_log_file=disabled_telemetry.version-service.log-cw.json + desc 'telemetry was disabled in CR but in operator not' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in CR but in operator not ----------------------------------------------------------------------------------- + '[' 8.0-recommended == disabled -a disabled == enabled ']' + local telemetry_cr_log_file=disabled_telemetry.version-service-cr.log-cw.json + local image_prefix=8.0 + desc 'telemetry was disabled in operator but not in CR' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in operator but not in CR ----------------------------------------------------------------------------------- + '[' 8.0-recommended == 8.0-recommended -a disabled == disabled ']' + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/disabled_telemetry.version-service-cr.log-cw.json /dev/fd/63 ++ grep -f /tmp/tmp.QfOIVStdwq/disabled_telemetry.version-service-cr.log.json /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/disabled_telemetry.version-service-cr.log-cw.json + [[ -s /tmp/tmp.QfOIVStdwq/disabled_telemetry.version-service.log.json ]] + desc 'telemetry was disabled in CR as well as in operator' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in CR as well as in operator ----------------------------------------------------------------------------------- + '[' 8.0-recommended == disabled -a disabled == disabled ']' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 
'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.yZevVCc9Y7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.u83qB4Fn2t ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yZevVCc9Y7 ++ cat /tmp/tmp.u83qB4Fn2t ++ rm /tmp/tmp.yZevVCc9Y7 /tmp/tmp.u83qB4Fn2t ++ return 0 + kubectl_bin delete pod -n psmdb-operator percona-server-mongodb-operator-79456db78d-xxdsl ++ mktemp + local LAST_OUT=/tmp/tmp.5pvFv4SbKP ++ mktemp + local LAST_ERR=/tmp/tmp.wqFL0p1lHU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pod -n psmdb-operator percona-server-mongodb-operator-79456db78d-xxdsl + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5pvFv4SbKP pod "percona-server-mongodb-operator-79456db78d-xxdsl" deleted from psmdb-operator namespace + cat /tmp/tmp.wqFL0p1lHU + rm /tmp/tmp.5pvFv4SbKP /tmp/tmp.wqFL0p1lHU + return 0 + kubectl_bin patch psmdb minimal-cluster --type=merge -p '{"metadata":{"finalizers":["percona.com/delete-psmdb-pvc"]}}' ++ mktemp + local LAST_OUT=/tmp/tmp.EIV4hEWSIs ++ mktemp + local LAST_ERR=/tmp/tmp.UKNFa4Dd0H + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb minimal-cluster --type=merge -p '{"metadata":{"finalizers":["percona.com/delete-psmdb-pvc"]}}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EIV4hEWSIs perconaservermongodb.psmdb.percona.com/minimal-cluster patched + cat /tmp/tmp.UKNFa4Dd0H + rm /tmp/tmp.EIV4hEWSIs /tmp/tmp.UKNFa4Dd0H + return 0 + kubectl_bin delete psmdb --all ++ mktemp + local LAST_OUT=/tmp/tmp.hIqPJ4BUF6 ++ mktemp + local LAST_ERR=/tmp/tmp.Fa9nTl18Jj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hIqPJ4BUF6 perconaservermongodb.psmdb.percona.com "minimal-cluster" deleted from version-service-22351 namespace + cat /tmp/tmp.Fa9nTl18Jj + rm /tmp/tmp.hIqPJ4BUF6 /tmp/tmp.Fa9nTl18Jj + return 0 + kubectl_bin delete deploy psmdb-client ++ mktemp + local LAST_OUT=/tmp/tmp.Jbq0dyLRXR ++ mktemp + local LAST_ERR=/tmp/tmp.7Al9RHnku6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete deploy psmdb-client + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Jbq0dyLRXR deployment.apps "psmdb-client" deleted from version-service-22351 namespace + cat /tmp/tmp.7Al9RHnku6 + rm /tmp/tmp.Jbq0dyLRXR /tmp/tmp.7Al9RHnku6 + return 0 + sleep 30 + kubectl_bin get deployment/percona-server-mongodb-operator -o yaml -n psmdb-operator + yq eval '(.spec.template.spec.containers[0].env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' + kubectl_bin apply -n psmdb-operator -f - ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.CQW1SI1z2C + local LAST_OUT=/tmp/tmp.6CViyh05zX ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.lC3RgmeNzu + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.8I7PeVLVbw + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + for i in $(seq 0 2) + set +e + kubectl get deployment/percona-server-mongodb-operator -o yaml -n psmdb-operator + exit_status=0 + set 
-e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6CViyh05zX + cat /tmp/tmp.8I7PeVLVbw + rm /tmp/tmp.6CViyh05zX /tmp/tmp.8I7PeVLVbw + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.CQW1SI1z2C deployment.apps/percona-server-mongodb-operator configured + cat /tmp/tmp.lC3RgmeNzu + rm /tmp/tmp.CQW1SI1z2C /tmp/tmp.lC3RgmeNzu + return 0 + wait_deployment percona-server-mongodb-operator + local name=percona-server-mongodb-operator + sleep 10 + retry=0 + echo -n percona-server-mongodb-operator percona-server-mongodb-operator+ kubectl_bin get deployment percona-server-mongodb-operator -n psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.NHWXNNiktq ++ mktemp + local LAST_ERR=/tmp/tmp.QnLs4ChUNZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get deployment percona-server-mongodb-operator -n psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.NHWXNNiktq + cat /tmp/tmp.QnLs4ChUNZ + rm /tmp/tmp.NHWXNNiktq /tmp/tmp.QnLs4ChUNZ + return 0 ++ kubectl_bin get deployment percona-server-mongodb-operator -o 'jsonpath={.status.replicas}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.GLJTsSTB0F +++ mktemp ++ local LAST_ERR=/tmp/tmp.pPMmi8juvX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get deployment percona-server-mongodb-operator -o 'jsonpath={.status.replicas}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.GLJTsSTB0F ++ cat /tmp/tmp.pPMmi8juvX ++ rm /tmp/tmp.GLJTsSTB0F /tmp/tmp.pPMmi8juvX ++ return 0 ++ kubectl_bin get deployment percona-server-mongodb-operator -o 'jsonpath={.status.readyReplicas}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.rI84BygB9g +++ mktemp ++ local LAST_ERR=/tmp/tmp.5xIbWUoaqa ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get deployment percona-server-mongodb-operator -o 'jsonpath={.status.readyReplicas}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rI84BygB9g ++ cat /tmp/tmp.5xIbWUoaqa ++ rm /tmp/tmp.rI84BygB9g /tmp/tmp.5xIbWUoaqa ++ return 0 + '[' 1 == 1 ']' + kubectl_bin delete pod -l run=version-service-cr -n psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.IoJvGaZXVO ++ mktemp + local LAST_ERR=/tmp/tmp.OJxfVoBkno + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pod -l run=version-service-cr -n psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.IoJvGaZXVO pod "version-service-cr-6fcc64c58f-29zcf" deleted from psmdb-operator namespace + cat /tmp/tmp.OJxfVoBkno + rm /tmp/tmp.IoJvGaZXVO /tmp/tmp.OJxfVoBkno + return 0 + kubectl_bin delete pod -l run=version-service -n psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.p5YC7iXu3H ++ mktemp + local LAST_ERR=/tmp/tmp.ZuNHAmTWgS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pod -l run=version-service -n psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.p5YC7iXu3H pod "version-service-65bc6b945c-kcwjw" deleted from psmdb-operator namespace + cat /tmp/tmp.ZuNHAmTWgS + rm /tmp/tmp.p5YC7iXu3H /tmp/tmp.ZuNHAmTWgS + return 0 + check_telemetry_transfer http://version-service-cr:11000 disabled disabled + local cr_vs_uri=http://version-service-cr:11000 + local cr_vs_channel=disabled + local telemetry_state=disabled + 
cluster=minimal-cluster + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.kFlTpWkulN ++ mktemp + local LAST_ERR=/tmp/tmp.ewZOfWU6JF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.kFlTpWkulN deployment.apps/psmdb-client created + cat /tmp/tmp.ewZOfWU6JF + rm /tmp/tmp.kFlTpWkulN /tmp/tmp.ewZOfWU6JF + return 0 + yq eval '.metadata.name = "minimal-cluster"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/secrets.yml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.Bnq8w95JiA ++ mktemp + local LAST_ERR=/tmp/tmp.j5oaYUcjt4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Bnq8w95JiA secret/minimal-cluster configured + cat /tmp/tmp.j5oaYUcjt4 Warning: resource secrets/minimal-cluster is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. + rm /tmp/tmp.Bnq8w95JiA /tmp/tmp.j5oaYUcjt4 + return 0 + desc 'create PSMDB minimal cluster minimal-cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PSMDB minimal cluster minimal-cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f - + yq eval ' .spec.upgradeOptions.versionServiceEndpoint = "http://version-service-cr:11000" | .spec.upgradeOptions.apply = "disabled" | .spec.initImage = "perconalab/percona-server-mongodb-operator:PR-2044-c13d0759" | .spec.crVersion = "9.9.9" | .spec.image = "perconalab/percona-server-mongodb-operator:main-mongod8.0" | .spec.pmm.image = "percona/pmm-client:2.44.1-1" | .spec.backup.enabled = false | .spec.backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/cr-minimal.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.6DO57XGBt7 ++ mktemp + local LAST_ERR=/tmp/tmp.33CzSFrBjg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6DO57XGBt7 perconaservermongodb.psmdb.percona.com/minimal-cluster created + cat /tmp/tmp.33CzSFrBjg + rm /tmp/tmp.6DO57XGBt7 /tmp/tmp.33CzSFrBjg + return 0 + desc 'check if Pod is started' + set +o xtrace ----------------------------------------------------------------------------------- check if Pod is started ----------------------------------------------------------------------------------- + wait_for_running minimal-cluster-rs0 1 + local name=minimal-cluster-rs0 + let last_pod=0 + : + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=minimal-cluster ++ seq 0 0 + for i in $(seq 0 $last_pod) + [[ 
0 -eq 0 ]] ++ kubectl_bin get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LrWJYZfNEn +++ mktemp ++ local LAST_ERR=/tmp/tmp.uXo7kNbCBb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LrWJYZfNEn ++ cat /tmp/tmp.uXo7kNbCBb ++ rm /tmp/tmp.LrWJYZfNEn /tmp/tmp.uXo7kNbCBb ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod minimal-cluster-rs0-0 + local pod=minimal-cluster-rs0-0 + set +o xtrace waiting for pod/minimal-cluster-rs0-0 to be ready.............OK ++ kubectl_bin get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g8Rk5RDghh +++ mktemp ++ local LAST_ERR=/tmp/tmp.Fm0OSmZcOt ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.g8Rk5RDghh ++ cat /tmp/tmp.Fm0OSmZcOt ++ rm /tmp/tmp.g8Rk5RDghh /tmp/tmp.Fm0OSmZcOt ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3vPdcF9I9R +++ mktemp ++ local LAST_ERR=/tmp/tmp.Rw3Uv7Fk0F ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb minimal-cluster -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3vPdcF9I9R ++ cat /tmp/tmp.Rw3Uv7Fk0F ++ rm /tmp/tmp.3vPdcF9I9R /tmp/tmp.Rw3Uv7Fk0F ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness................................ 
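The dotted readiness lines come from a poll loop: wait_pod queries the pod's Ready condition and prints a dot per attempt until it flips to True. A minimal sketch of that loop (the retry cap is illustrative, not the harness's actual timeout):

    # Sketch: poll a pod's Ready condition, one dot per attempt.
    wait_pod() {
        local pod=$1 retries=0
        echo -n "waiting for pod/$pod to be ready"
        until [[ $(kubectl get pod "$pod" \
            -o 'jsonpath={.status.conditions[?(@.type=="Ready")].status}') == "True" ]]; do
            echo -n .
            sleep 1
            (( ++retries > 360 )) && { echo timeout; return 1; }
        done
        echo OK
    }
    wait_pod minimal-cluster-rs0-0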
+ sleep 20 + desc 'create user myApp' + set +o xtrace ----------------------------------------------------------------------------------- create user myApp ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' userAdmin:userAdmin123456@minimal-cluster-rs0.version-service-22351 + local 'command=db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' + local uri=userAdmin:userAdmin123456@minimal-cluster-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZAlhRo1N09 +++ mktemp ++ local LAST_ERR=/tmp/tmp.x8wZXmAAts ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZAlhRo1N09 ++ cat /tmp/tmp.x8wZXmAAts ++ rm /tmp/tmp.ZAlhRo1N09 /tmp/tmp.x8wZXmAAts ++ return 0 + local client_container=psmdb-client-696897d69b-g95mj + local mongo_flag= + [[ userAdmin:userAdmin123456@minimal-cluster-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-g95mj -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@minimal-cluster-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.dIaXchebbk ++ mktemp + local LAST_ERR=/tmp/tmp.BLDDZOhae5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-g95mj -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@minimal-cluster-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.dIaXchebbk Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://minimal-cluster-rs0-0.minimal-cluster-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("ada166ba-6b21-4aea-86de-3e0e2dd936e3") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.BLDDZOhae5 + rm /tmp/tmp.dIaXchebbk /tmp/tmp.BLDDZOhae5 + return 0 + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@minimal-cluster-rs0.version-service-22351 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@minimal-cluster-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.CCAX0ipy7O +++ mktemp ++ local LAST_ERR=/tmp/tmp.9wmb1pkkyf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CCAX0ipy7O ++ cat /tmp/tmp.9wmb1pkkyf ++ rm /tmp/tmp.CCAX0ipy7O /tmp/tmp.9wmb1pkkyf ++ return 0 + local client_container=psmdb-client-696897d69b-g95mj + local mongo_flag= + [[ myApp:myPass@minimal-cluster-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-g95mj -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@minimal-cluster-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.vo2ObtMP1k ++ mktemp + local LAST_ERR=/tmp/tmp.2ObW3BSj5Z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-g95mj -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@minimal-cluster-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vo2ObtMP1k Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://minimal-cluster-rs0-0.minimal-cluster-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("19636d6e-9771-4c7d-b02a-afc3353f136c") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.2ObW3BSj5Z + rm /tmp/tmp.vo2ObtMP1k /tmp/tmp.2ObW3BSj5Z + return 0 + desc 'check telemetry' + set +o xtrace ----------------------------------------------------------------------------------- check telemetry ----------------------------------------------------------------------------------- + grep -E 'server request payload|unary call' + grep -Eo '\{.*\}' ++ kubectl get pods --selector=run=version-service-cr -o 'jsonpath={.items[0].metadata.name}' -n psmdb-operator + jq 'del(."grpc.request.content".msg.customResourceUid)' + jq 'del(."grpc.request.content".msg.kubeVersion)' + jq 'del(."grpc.start_time")' + jq 'del(."grpc.time_ms")' + kubectl_bin logs version-service-cr-6fcc64c58f-xgffb -n psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.lZEGfzHsjA ++ mktemp + local LAST_ERR=/tmp/tmp.Qaym7wZ4Dh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs version-service-cr-6fcc64c58f-xgffb -n psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lZEGfzHsjA + cat /tmp/tmp.Qaym7wZ4Dh + rm /tmp/tmp.lZEGfzHsjA /tmp/tmp.Qaym7wZ4Dh + return 0 + grep -E 'server request payload|unary call' ++ kubectl get pods --selector=run=version-service -o 'jsonpath={.items[0].metadata.name}' -n psmdb-operator + grep -Eo '\{.*\}' + jq 'del(."grpc.request.content".msg.customResourceUid)' + jq 'del(."grpc.request.content".msg.kubeVersion)' + jq 'del(."grpc.start_time")' + jq 'del(."grpc.time_ms")' + kubectl_bin logs version-service-65bc6b945c-rzh4g -n psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.H4G5MEn2WB ++ mktemp + local LAST_ERR=/tmp/tmp.ZKDz5iMc2C + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) 
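Both telemetry scrapes normalize the version-service logs the same way before comparison: keep only the gRPC payload lines, cut out the JSON object, and jq-delete the fields that legitimately differ between runs (resource UID, kube version, timestamps). Condensed, the pipeline is roughly:

    # Sketch of the normalization applied before diffing against the stored
    # disabled_telemetry.*.json expectations.
    pod=$(kubectl get pods --selector=run=version-service-cr \
        -o 'jsonpath={.items[0].metadata.name}' -n psmdb-operator)
    kubectl logs "$pod" -n psmdb-operator |
        grep -E 'server request payload|unary call' |
        grep -Eo '\{.*\}' |
        jq 'del(."grpc.request.content".msg.customResourceUid,
                ."grpc.request.content".msg.kubeVersion,
                ."grpc.start_time",
                ."grpc.time_ms")'

With the volatile fields stripped, an empty result means no telemetry was sent, which is what the [[ -s … ]] checks on the captured .json files assert for the disabled cases.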
+ set +e + kubectl logs version-service-65bc6b945c-rzh4g -n psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.H4G5MEn2WB + cat /tmp/tmp.ZKDz5iMc2C + rm /tmp/tmp.H4G5MEn2WB /tmp/tmp.ZKDz5iMc2C + return 0 + local telemetry_log_file=disabled_telemetry.version-service.log-cw.json + desc 'telemetry was disabled in CR but in operator not' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in CR but in operator not ----------------------------------------------------------------------------------- + '[' disabled == disabled -a disabled == enabled ']' + local telemetry_cr_log_file=disabled_telemetry.version-service-cr.log-cw.json + local image_prefix=disabled + desc 'telemetry was disabled in operator but not in CR' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in operator but not in CR ----------------------------------------------------------------------------------- + '[' disabled == disabled-recommended -a disabled == disabled ']' + desc 'telemetry was disabled in CR as well as in operator' + set +o xtrace ----------------------------------------------------------------------------------- telemetry was disabled in CR as well as in operator ----------------------------------------------------------------------------------- + '[' disabled == disabled -a disabled == disabled ']' + [[ -s /tmp/tmp.QfOIVStdwq/disabled_telemetry.version-service-cr.log.json ]] + [[ -s /tmp/tmp.QfOIVStdwq/disabled_telemetry.version-service.log.json ]] ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.DKcLUmjxhI +++ mktemp ++ local LAST_ERR=/tmp/tmp.MSQil4AY4P ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DKcLUmjxhI ++ cat /tmp/tmp.MSQil4AY4P ++ rm /tmp/tmp.DKcLUmjxhI /tmp/tmp.MSQil4AY4P ++ return 0 + kubectl_bin delete pod -n psmdb-operator percona-server-mongodb-operator-79456db78d-mzx9g ++ mktemp + local LAST_OUT=/tmp/tmp.YzZ5vA3Rrm ++ mktemp + local LAST_ERR=/tmp/tmp.CDhj353s8D + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete pod -n psmdb-operator percona-server-mongodb-operator-79456db78d-mzx9g + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YzZ5vA3Rrm pod "percona-server-mongodb-operator-79456db78d-mzx9g" deleted from psmdb-operator namespace + cat /tmp/tmp.CDhj353s8D + rm /tmp/tmp.YzZ5vA3Rrm /tmp/tmp.CDhj353s8D + return 0 + kubectl_bin patch psmdb minimal-cluster --type=merge -p '{"metadata":{"finalizers":["percona.com/delete-psmdb-pvc"]}}' ++ mktemp + local LAST_OUT=/tmp/tmp.uFklLZSXD3 ++ mktemp + local LAST_ERR=/tmp/tmp.Fnbyvlfzjn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl patch psmdb minimal-cluster --type=merge -p '{"metadata":{"finalizers":["percona.com/delete-psmdb-pvc"]}}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.uFklLZSXD3 perconaservermongodb.psmdb.percona.com/minimal-cluster patched + cat /tmp/tmp.Fnbyvlfzjn + rm /tmp/tmp.uFklLZSXD3 /tmp/tmp.Fnbyvlfzjn + return 0 + kubectl_bin delete 
psmdb --all ++ mktemp + local LAST_OUT=/tmp/tmp.uPMUPQgpS1 ++ mktemp + local LAST_ERR=/tmp/tmp.XJY1QTnj1z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.uPMUPQgpS1 perconaservermongodb.psmdb.percona.com "minimal-cluster" deleted from version-service-22351 namespace + cat /tmp/tmp.XJY1QTnj1z + rm /tmp/tmp.uPMUPQgpS1 /tmp/tmp.XJY1QTnj1z + return 0 + kubectl_bin delete deploy psmdb-client ++ mktemp + local LAST_OUT=/tmp/tmp.scgO5woByw ++ mktemp + local LAST_ERR=/tmp/tmp.jGM2zpPhGn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete deploy psmdb-client + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.scgO5woByw deployment.apps "psmdb-client" deleted from version-service-22351 namespace + cat /tmp/tmp.jGM2zpPhGn + rm /tmp/tmp.scgO5woByw /tmp/tmp.jGM2zpPhGn + return 0 + sleep 30 + cases=("version-service-exact" "version-service-recommended" "version-service-latest" "version-service-major" "version-service-unreachable") + expected_images=("percona/percona-server-mongodb:6.0.3-2" "percona/percona-server-mongodb:8.0.4-1-multi" "percona/percona-server-mongodb:8.0.4-1-multi" "percona/percona-server-mongodb:6.0.4-3" "$IMAGE_MONGOD") + for i in "${!cases[@]}" + desc 'test version-service-exact' + set +o xtrace ----------------------------------------------------------------------------------- test version-service-exact ----------------------------------------------------------------------------------- + cluster=version-service-exact + expected_image=percona/percona-server-mongodb:6.0.3-2 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.gWVLabXgVz ++ mktemp + local LAST_ERR=/tmp/tmp.GN9ZqVXDwD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gWVLabXgVz secret/some-users created deployment.apps/psmdb-client created + cat /tmp/tmp.GN9ZqVXDwD + rm /tmp/tmp.gWVLabXgVz /tmp/tmp.GN9ZqVXDwD + return 0 + desc 'create PSMDB cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PSMDB cluster ----------------------------------------------------------------------------------- ++ mktemp + tmp_file=/tmp/tmp.IR6hXOgtfy + sed s%#initImage%perconalab/percona-server-mongodb-operator:PR-2044-c13d0759%g /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/version-service-exact-rs0.yml + desc 'create first PSMDB cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster ----------------------------------------------------------------------------------- + yq eval ' .spec.backup.enabled = false | del(.spec.backup.tasks) | .spec.image = "perconalab/percona-server-mongodb-operator:main-mongod8.0" | .spec.pmm.image = "percona/pmm-client:2.44.1-1" | .spec.backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' /tmp/tmp.IR6hXOgtfy + kubectl_bin apply -f - ++ mktemp + local 
LAST_OUT=/tmp/tmp.YnxZWnL9Yh ++ mktemp + local LAST_ERR=/tmp/tmp.wQf7mFV51G + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YnxZWnL9Yh perconaservermongodb.psmdb.percona.com/version-service-exact created + cat /tmp/tmp.wQf7mFV51G + rm /tmp/tmp.YnxZWnL9Yh /tmp/tmp.wQf7mFV51G + return 0 + desc 'check if Pod is started' + set +o xtrace ----------------------------------------------------------------------------------- check if Pod is started ----------------------------------------------------------------------------------- + wait_for_running version-service-exact-rs0 3 + local name=version-service-exact-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=version-service-exact ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod version-service-exact-rs0-0 + local pod=version-service-exact-rs0-0 + set +o xtrace waiting for pod/version-service-exact-rs0-0 to be ready.....................OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod version-service-exact-rs0-1 + local pod=version-service-exact-rs0-1 + set +o xtrace waiting for pod/version-service-exact-rs0-1 to be ready....................OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb version-service-exact -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oABWq50Hrk +++ mktemp ++ local LAST_ERR=/tmp/tmp.AVNIfx5YMJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb version-service-exact -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.oABWq50Hrk ++ cat /tmp/tmp.AVNIfx5YMJ ++ rm /tmp/tmp.oABWq50Hrk /tmp/tmp.AVNIfx5YMJ ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod version-service-exact-rs0-2 + local pod=version-service-exact-rs0-2 + set +o xtrace waiting for pod/version-service-exact-rs0-2 to be ready......................OK ++ kubectl_bin get psmdb version-service-exact -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CQHlk0qqtS +++ mktemp ++ local LAST_ERR=/tmp/tmp.CPnA9iOFGb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb version-service-exact -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CQHlk0qqtS ++ cat /tmp/tmp.CPnA9iOFGb ++ rm /tmp/tmp.CQHlk0qqtS /tmp/tmp.CPnA9iOFGb ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb version-service-exact -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YmXxIPibng +++ mktemp ++ local LAST_ERR=/tmp/tmp.xkJCFTtCND ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb version-service-exact -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.YmXxIPibng ++ cat /tmp/tmp.xkJCFTtCND ++ rm /tmp/tmp.YmXxIPibng /tmp/tmp.xkJCFTtCND ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + sleep 20 + desc 'check if statefulset created with expected config' + set +o xtrace 
----------------------------------------------------------------------------------- check if statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/version-service-exact-rs0 + local resource=statefulset/version-service-exact-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-exact-rs0.yml + local new_result=/tmp/tmp.QfOIVStdwq/statefulset_version-service-exact-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-exact-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/version-service-exact-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("version-service-22351", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.7gEq7rmZlA ++ mktemp + local LAST_ERR=/tmp/tmp.mauPcg71xl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/version-service-exact-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7gEq7rmZlA + cat /tmp/tmp.mauPcg71xl + rm /tmp/tmp.7gEq7rmZlA /tmp/tmp.mauPcg71xl + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-exact-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-exact-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-exact-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-exact-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-exact-rs0.yml /tmp/tmp.QfOIVStdwq/statefulset_version-service-exact-rs0.yml + log 'compare_kubectl: statefulset/version-service-exact-rs0 OK' + set +o xtrace [2025-12-10T15:15:13+0000] compare_kubectl: statefulset/version-service-exact-rs0 OK + desc 'create user myApp' + set +o xtrace ----------------------------------------------------------------------------------- create user myApp ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' userAdmin:userAdmin123456@version-service-exact-rs0.version-service-22351 + local 'command=db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' + local uri=userAdmin:userAdmin123456@version-service-exact-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UVnQsyxKxe +++ mktemp ++ local LAST_ERR=/tmp/tmp.IY5FkjRuoF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.UVnQsyxKxe ++ cat /tmp/tmp.IY5FkjRuoF ++ rm /tmp/tmp.UVnQsyxKxe /tmp/tmp.IY5FkjRuoF ++ return 0 + local client_container=psmdb-client-696897d69b-h6bsz + local mongo_flag= + [[ userAdmin:userAdmin123456@version-service-exact-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@version-service-exact-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Yw7xYhGzET ++ mktemp + local LAST_ERR=/tmp/tmp.ROYGORIBNL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo 
mongodb+srv://userAdmin:userAdmin123456@version-service-exact-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Yw7xYhGzET Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://version-service-exact-rs0-0.version-service-exact-rs0.version-service-22351.svc.cluster.local:27017,version-service-exact-rs0-1.version-service-exact-rs0.version-service-22351.svc.cluster.local:27017,version-service-exact-rs0-2.version-service-exact-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("b8fe1346-4d91-4dcc-8677-b99c54594a0c") } Percona Server for MongoDB server version: v6.0.3-2 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.ROYGORIBNL + rm /tmp/tmp.Yw7xYhGzET /tmp/tmp.ROYGORIBNL + return 0 + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@version-service-exact-rs0.version-service-22351 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@version-service-exact-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LBkz62o32V +++ mktemp ++ local LAST_ERR=/tmp/tmp.QK1LoWRtOR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LBkz62o32V ++ cat /tmp/tmp.QK1LoWRtOR ++ rm /tmp/tmp.LBkz62o32V /tmp/tmp.QK1LoWRtOR ++ return 0 + local client_container=psmdb-client-696897d69b-h6bsz + local mongo_flag= + [[ myApp:myPass@version-service-exact-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@version-service-exact-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.9t0x1F8xoS ++ mktemp + local LAST_ERR=/tmp/tmp.6MHq0ElKO2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@version-service-exact-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9t0x1F8xoS Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://version-service-exact-rs0-2.version-service-exact-rs0.version-service-22351.svc.cluster.local:27017,version-service-exact-rs0-0.version-service-exact-rs0.version-service-22351.svc.cluster.local:27017,version-service-exact-rs0-1.version-service-exact-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit 
session: session { "id" : UUID("7f6fd8c8-4f31-47f3-8aa7-2544ead89d66") } Percona Server for MongoDB server version: v6.0.3-2 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.6MHq0ElKO2 + rm /tmp/tmp.9t0x1F8xoS /tmp/tmp.6MHq0ElKO2 + return 0 + compare_kubectl statefulset/version-service-exact-rs0 + local resource=statefulset/version-service-exact-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-exact-rs0.yml + local new_result=/tmp/tmp.QfOIVStdwq/statefulset_version-service-exact-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-exact-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/version-service-exact-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("version-service-22351", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.sBzCkGOBRN ++ mktemp + local LAST_ERR=/tmp/tmp.eFOtt23ruQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/version-service-exact-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sBzCkGOBRN + cat /tmp/tmp.eFOtt23ruQ + rm /tmp/tmp.sBzCkGOBRN /tmp/tmp.eFOtt23ruQ + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-exact-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-exact-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-exact-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-exact-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-exact-rs0.yml /tmp/tmp.QfOIVStdwq/statefulset_version-service-exact-rs0.yml + log 'compare_kubectl: statefulset/version-service-exact-rs0 OK' + set +o xtrace [2025-12-10T15:15:17+0000] compare_kubectl: statefulset/version-service-exact-rs0 OK + pods=($(kubectl get pods -l app.kubernetes.io/name=percona-server-mongodb -o=name)) ++ kubectl get pods -l app.kubernetes.io/name=percona-server-mongodb -o=name + '[' 3 -eq 0 ']' + for pod in "${pods[@]}" ++ kubectl get pod/version-service-exact-rs0-0 -o 'jsonpath={.spec.containers[0].image}' + img=percona/percona-server-mongodb:6.0.3-2 + '[' percona/percona-server-mongodb:6.0.3-2 '!=' percona/percona-server-mongodb:6.0.3-2 ']' + for pod in "${pods[@]}" ++ kubectl get pod/version-service-exact-rs0-1 -o 'jsonpath={.spec.containers[0].image}' + img=percona/percona-server-mongodb:6.0.3-2 + '[' percona/percona-server-mongodb:6.0.3-2 '!=' percona/percona-server-mongodb:6.0.3-2 ']' + for pod in "${pods[@]}" ++ kubectl get pod/version-service-exact-rs0-2 -o 'jsonpath={.spec.containers[0].image}' + img=percona/percona-server-mongodb:6.0.3-2 + '[' percona/percona-server-mongodb:6.0.3-2 '!=' percona/percona-server-mongodb:6.0.3-2 ']' + kubectl_bin delete psmdb --all ++ mktemp + local LAST_OUT=/tmp/tmp.HJUEHZk95Z ++ mktemp + local LAST_ERR=/tmp/tmp.znG6FOBQQt + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.HJUEHZk95Z perconaservermongodb.psmdb.percona.com "version-service-exact" deleted from version-service-22351 namespace + cat /tmp/tmp.znG6FOBQQt + rm /tmp/tmp.HJUEHZk95Z /tmp/tmp.znG6FOBQQt + return 0 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.V7418CBkXO +++ mktemp ++ local LAST_ERR=/tmp/tmp.U1ID8E9o7B ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.V7418CBkXO ++ cat /tmp/tmp.U1ID8E9o7B ++ rm /tmp/tmp.V7418CBkXO /tmp/tmp.U1ID8E9o7B ++ return 0 + kubectl delete pod -n psmdb-operator 
percona-server-mongodb-operator-79456db78d-h7wfx pod "percona-server-mongodb-operator-79456db78d-h7wfx" deleted from psmdb-operator namespace + sleep 10 + for i in "${!cases[@]}" + desc 'test version-service-recommended' + set +o xtrace ----------------------------------------------------------------------------------- test version-service-recommended ----------------------------------------------------------------------------------- + cluster=version-service-recommended + expected_image=percona/percona-server-mongodb:8.0.4-1-multi + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.EGOWebaYqa ++ mktemp + local LAST_ERR=/tmp/tmp.WM2G1Mn52a + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EGOWebaYqa secret/some-users configured deployment.apps/psmdb-client unchanged + cat /tmp/tmp.WM2G1Mn52a Warning: resource secrets/some-users is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. + rm /tmp/tmp.EGOWebaYqa /tmp/tmp.WM2G1Mn52a + return 0 + desc 'create PSMDB cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PSMDB cluster ----------------------------------------------------------------------------------- ++ mktemp + tmp_file=/tmp/tmp.1XGIJjC4nT + sed s%#initImage%perconalab/percona-server-mongodb-operator:PR-2044-c13d0759%g /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/version-service-recommended-rs0.yml + desc 'create first PSMDB cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster ----------------------------------------------------------------------------------- + yq eval ' .spec.backup.enabled = false | del(.spec.backup.tasks) | .spec.image = "perconalab/percona-server-mongodb-operator:main-mongod8.0" | .spec.pmm.image = "percona/pmm-client:2.44.1-1" | .spec.backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' /tmp/tmp.1XGIJjC4nT + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.GK5dXLkNzq ++ mktemp + local LAST_ERR=/tmp/tmp.8rLeruz1pq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GK5dXLkNzq perconaservermongodb.psmdb.percona.com/version-service-recommended created + cat /tmp/tmp.8rLeruz1pq + rm /tmp/tmp.GK5dXLkNzq /tmp/tmp.8rLeruz1pq + return 0 + desc 'check if Pod is started' + set +o xtrace ----------------------------------------------------------------------------------- check if Pod is started ----------------------------------------------------------------------------------- + wait_for_running version-service-recommended-rs0 3 + local name=version-service-recommended-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o 
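
Each case renders its custom resource the same way before applying it: sed swaps the #initImage placeholder in the case's CR template for the PR build, then yq forces the images under test and disables backups, and the result is piped into kubectl apply -f -. Condensed, with init_image, mongod_image, and pmm_image as placeholder variables:

    tmp_file=$(mktemp)
    sed "s%#initImage%${init_image}%g" \
        "${src_dir}/e2e-tests/version-service/conf/version-service-recommended-rs0.yml" > "$tmp_file"
    yq eval "
        .spec.backup.enabled = false |
        del(.spec.backup.tasks) |
        .spec.image = \"${mongod_image}\" |
        .spec.pmm.image = \"${pmm_image}\"" "$tmp_file" | kubectl apply -f -
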
xtrace + local rs_name=rs0 + local cluster_name=version-service-recommended ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod version-service-recommended-rs0-0 + local pod=version-service-recommended-rs0-0 + set +o xtrace waiting for pod/version-service-recommended-rs0-0 to be ready.............OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod version-service-recommended-rs0-1 + local pod=version-service-recommended-rs0-1 + set +o xtrace waiting for pod/version-service-recommended-rs0-1 to be ready...............OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb version-service-recommended -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LNYkO18xVc +++ mktemp ++ local LAST_ERR=/tmp/tmp.3zUNbYNsOM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb version-service-recommended -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.LNYkO18xVc ++ cat /tmp/tmp.3zUNbYNsOM ++ rm /tmp/tmp.LNYkO18xVc /tmp/tmp.3zUNbYNsOM ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod version-service-recommended-rs0-2 + local pod=version-service-recommended-rs0-2 + set +o xtrace waiting for pod/version-service-recommended-rs0-2 to be ready..............OK ++ kubectl_bin get psmdb version-service-recommended -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wdzgWzbMXl +++ mktemp ++ local LAST_ERR=/tmp/tmp.bhQxNByXiI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb version-service-recommended -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wdzgWzbMXl ++ cat /tmp/tmp.bhQxNByXiI ++ rm /tmp/tmp.wdzgWzbMXl /tmp/tmp.bhQxNByXiI ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb version-service-recommended -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MJXonA4NsT +++ mktemp ++ local LAST_ERR=/tmp/tmp.t52q5EmSYc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb version-service-recommended -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MJXonA4NsT ++ cat /tmp/tmp.t52q5EmSYc ++ rm /tmp/tmp.MJXonA4NsT /tmp/tmp.t52q5EmSYc ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness. 
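
wait_for_running polls each pod of the replset in order and then reads the CR's arbiter, nonvoting, and hidden flags to decide whether additional members must be waited on; the empty jsonpath results above mean none of the three are enabled. A bounded-retry sketch of the per-pod wait, not the repo's exact helper:

    wait_pod() {
        local pod=$1 retries=0
        echo -n "waiting for pod/$pod to be ready"
        until kubectl wait --for=condition=Ready "pod/$pod" --timeout=10s >/dev/null 2>&1; do
            echo -n .
            retries=$((retries + 1))
            # give up after roughly 10 minutes instead of looping forever
            [ $retries -ge 60 ] && { echo " timeout"; return 1; }
        done
        echo OK
    }
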
+ sleep 20 + desc 'check if statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/version-service-recommended-rs0 + local resource=statefulset/version-service-recommended-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-recommended-rs0.yml + local new_result=/tmp/tmp.QfOIVStdwq/statefulset_version-service-recommended-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-recommended-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/version-service-recommended-rs0 ++ mktemp + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("version-service-22351", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - + local LAST_OUT=/tmp/tmp.ZjWxn9Rp7e ++ mktemp + local LAST_ERR=/tmp/tmp.bIgV70AMIr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/version-service-recommended-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ZjWxn9Rp7e + cat /tmp/tmp.bIgV70AMIr + rm /tmp/tmp.ZjWxn9Rp7e /tmp/tmp.bIgV70AMIr + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-recommended-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-recommended-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-recommended-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-recommended-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-recommended-rs0.yml /tmp/tmp.QfOIVStdwq/statefulset_version-service-recommended-rs0.yml + log 'compare_kubectl: statefulset/version-service-recommended-rs0 OK' + set +o xtrace [2025-12-10T15:17:25+0000] compare_kubectl: statefulset/version-service-recommended-rs0 OK + desc 'create user myApp' + set +o xtrace ----------------------------------------------------------------------------------- create user myApp ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' userAdmin:userAdmin123456@version-service-recommended-rs0.version-service-22351 + local 'command=db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' + local uri=userAdmin:userAdmin123456@version-service-recommended-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IghdywGlo3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.SQc2oJvkij ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IghdywGlo3 ++ cat /tmp/tmp.SQc2oJvkij ++ rm /tmp/tmp.IghdywGlo3 /tmp/tmp.SQc2oJvkij ++ return 0 + local client_container=psmdb-client-696897d69b-h6bsz + local mongo_flag= + [[ userAdmin:userAdmin123456@version-service-recommended-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@version-service-recommended-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.68vcJfojZQ ++ mktemp + local LAST_ERR=/tmp/tmp.42wY7rqyGm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo 
mongodb+srv://userAdmin:userAdmin123456@version-service-recommended-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.68vcJfojZQ Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://version-service-recommended-rs0-1.version-service-recommended-rs0.version-service-22351.svc.cluster.local:27017,version-service-recommended-rs0-2.version-service-recommended-rs0.version-service-22351.svc.cluster.local:27017,version-service-recommended-rs0-0.version-service-recommended-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("61b18541-aa7e-4d69-9339-42b0870ee0cf") } Percona Server for MongoDB server version: v8.0.4-1 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.42wY7rqyGm + rm /tmp/tmp.68vcJfojZQ /tmp/tmp.42wY7rqyGm + return 0 + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@version-service-recommended-rs0.version-service-22351 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@version-service-recommended-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wmOshzopA8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.UA2JGMbdsM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.wmOshzopA8 ++ cat /tmp/tmp.UA2JGMbdsM ++ rm /tmp/tmp.wmOshzopA8 /tmp/tmp.UA2JGMbdsM ++ return 0 + local client_container=psmdb-client-696897d69b-h6bsz + local mongo_flag= + [[ myApp:myPass@version-service-recommended-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@version-service-recommended-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.wMA364zsE5 ++ mktemp + local LAST_ERR=/tmp/tmp.M8WCAoDJHX + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@version-service-recommended-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wMA364zsE5 Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://version-service-recommended-rs0-0.version-service-recommended-rs0.version-service-22351.svc.cluster.local:27017,version-service-recommended-rs0-1.version-service-recommended-rs0.version-service-22351.svc.cluster.local:27017,version-service-recommended-rs0-2.version-service-recommended-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("57ce0838-6a96-4998-9d2a-575b50e5e547") } Percona Server for MongoDB server version: v8.0.4-1 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.M8WCAoDJHX + rm /tmp/tmp.wMA364zsE5 /tmp/tmp.M8WCAoDJHX + return 0 + compare_kubectl statefulset/version-service-recommended-rs0 + local resource=statefulset/version-service-recommended-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-recommended-rs0.yml + local new_result=/tmp/tmp.QfOIVStdwq/statefulset_version-service-recommended-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-recommended-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/version-service-recommended-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("version-service-22351", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. 
| select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.9TRMSnJkUb ++ mktemp + local LAST_ERR=/tmp/tmp.hS7QgL5u1h + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/version-service-recommended-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9TRMSnJkUb + cat /tmp/tmp.hS7QgL5u1h + rm /tmp/tmp.9TRMSnJkUb /tmp/tmp.hS7QgL5u1h + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-recommended-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-recommended-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-recommended-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-recommended-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-recommended-rs0.yml /tmp/tmp.QfOIVStdwq/statefulset_version-service-recommended-rs0.yml + log 'compare_kubectl: statefulset/version-service-recommended-rs0 OK' + set +o xtrace [2025-12-10T15:17:29+0000] compare_kubectl: statefulset/version-service-recommended-rs0 OK + pods=($(kubectl get pods -l app.kubernetes.io/name=percona-server-mongodb -o=name)) ++ kubectl get pods -l app.kubernetes.io/name=percona-server-mongodb -o=name + '[' 3 -eq 0 ']' + for pod in "${pods[@]}" ++ kubectl get pod/version-service-recommended-rs0-0 -o 'jsonpath={.spec.containers[0].image}' + img=percona/percona-server-mongodb:8.0.4-1-multi + '[' percona/percona-server-mongodb:8.0.4-1-multi '!=' percona/percona-server-mongodb:8.0.4-1-multi ']' + for pod in "${pods[@]}" ++ kubectl get pod/version-service-recommended-rs0-1 -o 'jsonpath={.spec.containers[0].image}' + img=percona/percona-server-mongodb:8.0.4-1-multi + '[' percona/percona-server-mongodb:8.0.4-1-multi '!=' percona/percona-server-mongodb:8.0.4-1-multi ']' + for pod in "${pods[@]}" ++ kubectl get pod/version-service-recommended-rs0-2 -o 'jsonpath={.spec.containers[0].image}' + img=percona/percona-server-mongodb:8.0.4-1-multi + '[' percona/percona-server-mongodb:8.0.4-1-multi '!=' percona/percona-server-mongodb:8.0.4-1-multi ']' + kubectl_bin delete psmdb --all ++ mktemp + local LAST_OUT=/tmp/tmp.O7X2infFtI ++ mktemp + local LAST_ERR=/tmp/tmp.LoxajqVsVV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.O7X2infFtI perconaservermongodb.psmdb.percona.com "version-service-recommended" deleted from version-service-22351 namespace + cat /tmp/tmp.LoxajqVsVV + rm /tmp/tmp.O7X2infFtI /tmp/tmp.LoxajqVsVV + return 0 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.CRfI9fQVcZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.3DCb6r929J ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat 
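
version_gt, which guards the Kubernetes-version-specific cleanups above, is a one-liner around bc: relational expressions in bc print 1 when true, so `echo '1.31 >= 1.22' | bc -l` yields 1 on this 1.31 cluster. Roughly, with KUBE_VERSION standing in for however the suite stores the detected server version:

    version_gt() {
        # succeeds (exit 0) when the cluster version is >= the required one
        local required=$1
        [ "$(echo "$KUBE_VERSION >= $required" | bc -l)" -eq 1 ]
    }
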
/tmp/tmp.CRfI9fQVcZ ++ cat /tmp/tmp.3DCb6r929J ++ rm /tmp/tmp.CRfI9fQVcZ /tmp/tmp.3DCb6r929J ++ return 0 + kubectl delete pod -n psmdb-operator percona-server-mongodb-operator-79456db78d-fj682 pod "percona-server-mongodb-operator-79456db78d-fj682" deleted from psmdb-operator namespace + sleep 10 + for i in "${!cases[@]}" + desc 'test version-service-latest' + set +o xtrace ----------------------------------------------------------------------------------- test version-service-latest ----------------------------------------------------------------------------------- + cluster=version-service-latest + expected_image=percona/percona-server-mongodb:8.0.4-1-multi + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.iYT31HYTLw ++ mktemp + local LAST_ERR=/tmp/tmp.csb9oXkOWl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.iYT31HYTLw secret/some-users configured deployment.apps/psmdb-client unchanged + cat /tmp/tmp.csb9oXkOWl Warning: resource secrets/some-users is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. + rm /tmp/tmp.iYT31HYTLw /tmp/tmp.csb9oXkOWl + return 0 + desc 'create PSMDB cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PSMDB cluster ----------------------------------------------------------------------------------- ++ mktemp + tmp_file=/tmp/tmp.pG37Za0Z19 + sed s%#initImage%perconalab/percona-server-mongodb-operator:PR-2044-c13d0759%g /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/version-service-latest-rs0.yml + desc 'create first PSMDB cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster ----------------------------------------------------------------------------------- + yq eval ' .spec.backup.enabled = false | del(.spec.backup.tasks) | .spec.image = "perconalab/percona-server-mongodb-operator:main-mongod8.0" | .spec.pmm.image = "percona/pmm-client:2.44.1-1" | .spec.backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' /tmp/tmp.pG37Za0Z19 + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.ZYbI1JhKVA ++ mktemp + local LAST_ERR=/tmp/tmp.1fR6QvDpGD + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ZYbI1JhKVA perconaservermongodb.psmdb.percona.com/version-service-latest created + cat /tmp/tmp.1fR6QvDpGD + rm /tmp/tmp.ZYbI1JhKVA /tmp/tmp.1fR6QvDpGD + return 0 + desc 'check if Pod is started' + set +o xtrace ----------------------------------------------------------------------------------- check if Pod is started ----------------------------------------------------------------------------------- + wait_for_running version-service-latest-rs0 3 + local 
name=version-service-latest-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=version-service-latest ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod version-service-latest-rs0-0 + local pod=version-service-latest-rs0-0 + set +o xtrace waiting for pod/version-service-latest-rs0-0 to be ready..............OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod version-service-latest-rs0-1 + local pod=version-service-latest-rs0-1 + set +o xtrace waiting for pod/version-service-latest-rs0-1 to be ready................OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb version-service-latest -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ASujg0qZKY +++ mktemp ++ local LAST_ERR=/tmp/tmp.R2Ly6bpgUk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb version-service-latest -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ASujg0qZKY ++ cat /tmp/tmp.R2Ly6bpgUk ++ rm /tmp/tmp.ASujg0qZKY /tmp/tmp.R2Ly6bpgUk ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod version-service-latest-rs0-2 + local pod=version-service-latest-rs0-2 + set +o xtrace waiting for pod/version-service-latest-rs0-2 to be ready.................OK ++ kubectl_bin get psmdb version-service-latest -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xGiW2OqQdz +++ mktemp ++ local LAST_ERR=/tmp/tmp.mVYQPAoaAu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb version-service-latest -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xGiW2OqQdz ++ cat /tmp/tmp.mVYQPAoaAu ++ rm /tmp/tmp.xGiW2OqQdz /tmp/tmp.mVYQPAoaAu ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb version-service-latest -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nxNaGfIUhl +++ mktemp ++ local LAST_ERR=/tmp/tmp.IBJOtUWUfn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb version-service-latest -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nxNaGfIUhl ++ cat /tmp/tmp.IBJOtUWUfn ++ rm /tmp/tmp.nxNaGfIUhl /tmp/tmp.IBJOtUWUfn ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + sleep 20 + desc 'check if statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/version-service-latest-rs0 + local resource=statefulset/version-service-latest-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-latest-rs0.yml + local new_result=/tmp/tmp.QfOIVStdwq/statefulset_version-service-latest-rs0.yml + '[' -n '' -a -f 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-latest-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/version-service-latest-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("version-service-22351", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.Gj1gbh6sCQ ++ mktemp + local LAST_ERR=/tmp/tmp.8H4F5llZsx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/version-service-latest-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Gj1gbh6sCQ + cat /tmp/tmp.8H4F5llZsx + rm /tmp/tmp.Gj1gbh6sCQ /tmp/tmp.8H4F5llZsx + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-latest-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-latest-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-latest-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-latest-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-latest-rs0.yml /tmp/tmp.QfOIVStdwq/statefulset_version-service-latest-rs0.yml + log 'compare_kubectl: statefulset/version-service-latest-rs0 OK' + set +o xtrace [2025-12-10T15:19:44+0000] compare_kubectl: statefulset/version-service-latest-rs0 OK + desc 'create user myApp' + set +o xtrace ----------------------------------------------------------------------------------- create user myApp ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' userAdmin:userAdmin123456@version-service-latest-rs0.version-service-22351 + local 'command=db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' + local uri=userAdmin:userAdmin123456@version-service-latest-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.V5Coo3o11R +++ mktemp ++ local LAST_ERR=/tmp/tmp.e431oiyc5K ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.V5Coo3o11R ++ cat /tmp/tmp.e431oiyc5K ++ rm /tmp/tmp.V5Coo3o11R /tmp/tmp.e431oiyc5K ++ return 0 + local client_container=psmdb-client-696897d69b-h6bsz + local mongo_flag= + [[ userAdmin:userAdmin123456@version-service-latest-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@version-service-latest-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.brdOnECx3O ++ mktemp + local LAST_ERR=/tmp/tmp.m6MRPYDoXu + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo 
mongodb+srv://userAdmin:userAdmin123456@version-service-latest-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.brdOnECx3O Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://version-service-latest-rs0-2.version-service-latest-rs0.version-service-22351.svc.cluster.local:27017,version-service-latest-rs0-0.version-service-latest-rs0.version-service-22351.svc.cluster.local:27017,version-service-latest-rs0-1.version-service-latest-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("39ba0559-b27a-4915-aa43-4366fbe2b524") } Percona Server for MongoDB server version: v8.0.4-1 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.m6MRPYDoXu + rm /tmp/tmp.brdOnECx3O /tmp/tmp.m6MRPYDoXu + return 0 + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@version-service-latest-rs0.version-service-22351 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@version-service-latest-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iZWogULFQP +++ mktemp ++ local LAST_ERR=/tmp/tmp.80noUpbH0b ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iZWogULFQP ++ cat /tmp/tmp.80noUpbH0b ++ rm /tmp/tmp.iZWogULFQP /tmp/tmp.80noUpbH0b ++ return 0 + local client_container=psmdb-client-696897d69b-h6bsz + local mongo_flag= + [[ myApp:myPass@version-service-latest-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@version-service-latest-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.rfbXPUFOQN ++ mktemp + local LAST_ERR=/tmp/tmp.7cjreeEh6f + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@version-service-latest-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rfbXPUFOQN Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://version-service-latest-rs0-1.version-service-latest-rs0.version-service-22351.svc.cluster.local:27017,version-service-latest-rs0-2.version-service-latest-rs0.version-service-22351.svc.cluster.local:27017,version-service-latest-rs0-0.version-service-latest-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("7f8b2120-4792-44c2-8242-2c4ae3a71416") } Percona Server for MongoDB server version: v8.0.4-1 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.7cjreeEh6f + rm /tmp/tmp.rfbXPUFOQN /tmp/tmp.7cjreeEh6f + return 0 + compare_kubectl statefulset/version-service-latest-rs0 + local resource=statefulset/version-service-latest-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-latest-rs0.yml + local new_result=/tmp/tmp.QfOIVStdwq/statefulset_version-service-latest-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-latest-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/version-service-latest-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("version-service-22351", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.LXauLClrZs ++ mktemp + local LAST_ERR=/tmp/tmp.b14m2iUGO6 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/version-service-latest-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LXauLClrZs + cat /tmp/tmp.b14m2iUGO6 + rm /tmp/tmp.LXauLClrZs /tmp/tmp.b14m2iUGO6 + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-latest-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-latest-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-latest-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-latest-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-latest-rs0.yml /tmp/tmp.QfOIVStdwq/statefulset_version-service-latest-rs0.yml + log 'compare_kubectl: statefulset/version-service-latest-rs0 OK' + set +o xtrace [2025-12-10T15:19:47+0000] compare_kubectl: statefulset/version-service-latest-rs0 OK + pods=($(kubectl get pods -l app.kubernetes.io/name=percona-server-mongodb -o=name)) ++ kubectl get pods -l app.kubernetes.io/name=percona-server-mongodb -o=name + '[' 3 -eq 0 ']' + for pod in "${pods[@]}" ++ kubectl get pod/version-service-latest-rs0-0 -o 'jsonpath={.spec.containers[0].image}' + img=percona/percona-server-mongodb:8.0.4-1-multi + '[' percona/percona-server-mongodb:8.0.4-1-multi '!=' percona/percona-server-mongodb:8.0.4-1-multi ']' + for pod in "${pods[@]}" ++ kubectl get pod/version-service-latest-rs0-1 -o 'jsonpath={.spec.containers[0].image}' + img=percona/percona-server-mongodb:8.0.4-1-multi + '[' percona/percona-server-mongodb:8.0.4-1-multi '!=' percona/percona-server-mongodb:8.0.4-1-multi ']' + for pod in "${pods[@]}" ++ kubectl get pod/version-service-latest-rs0-2 -o 'jsonpath={.spec.containers[0].image}' + img=percona/percona-server-mongodb:8.0.4-1-multi + '[' percona/percona-server-mongodb:8.0.4-1-multi '!=' percona/percona-server-mongodb:8.0.4-1-multi ']' + kubectl_bin delete psmdb --all ++ mktemp + local LAST_OUT=/tmp/tmp.TF5HsE8RJn ++ mktemp + local LAST_ERR=/tmp/tmp.5WLUbdojwt + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TF5HsE8RJn perconaservermongodb.psmdb.percona.com "version-service-latest" deleted from version-service-22351 namespace + cat /tmp/tmp.5WLUbdojwt + rm /tmp/tmp.TF5HsE8RJn /tmp/tmp.5WLUbdojwt + return 0 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.kdN6OPtjTi +++ mktemp ++ local LAST_ERR=/tmp/tmp.8iiCZbM81N ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kdN6OPtjTi ++ cat /tmp/tmp.8iiCZbM81N ++ rm /tmp/tmp.kdN6OPtjTi 
/tmp/tmp.8iiCZbM81N ++ return 0 + kubectl delete pod -n psmdb-operator percona-server-mongodb-operator-79456db78d-s847n pod "percona-server-mongodb-operator-79456db78d-s847n" deleted from psmdb-operator namespace + sleep 10 + for i in "${!cases[@]}" + desc 'test version-service-major' + set +o xtrace ----------------------------------------------------------------------------------- test version-service-major ----------------------------------------------------------------------------------- + cluster=version-service-major + expected_image=percona/percona-server-mongodb:6.0.4-3 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.5g792XJOb9 ++ mktemp + local LAST_ERR=/tmp/tmp.Cn9CquYanZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5g792XJOb9 secret/some-users configured deployment.apps/psmdb-client unchanged + cat /tmp/tmp.Cn9CquYanZ Warning: resource secrets/some-users is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. + rm /tmp/tmp.5g792XJOb9 /tmp/tmp.Cn9CquYanZ + return 0 + desc 'create PSMDB cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PSMDB cluster ----------------------------------------------------------------------------------- ++ mktemp + tmp_file=/tmp/tmp.Qbh3WD12ra + sed s%#initImage%perconalab/percona-server-mongodb-operator:PR-2044-c13d0759%g /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/version-service-major-rs0.yml + desc 'create first PSMDB cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster ----------------------------------------------------------------------------------- + yq eval ' .spec.backup.enabled = false | del(.spec.backup.tasks) | .spec.image = "perconalab/percona-server-mongodb-operator:main-mongod8.0" | .spec.pmm.image = "percona/pmm-client:2.44.1-1" | .spec.backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' /tmp/tmp.Qbh3WD12ra + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.nWFyrl921j ++ mktemp + local LAST_ERR=/tmp/tmp.tXmb8wnXRn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nWFyrl921j perconaservermongodb.psmdb.percona.com/version-service-major created + cat /tmp/tmp.tXmb8wnXRn + rm /tmp/tmp.nWFyrl921j /tmp/tmp.tXmb8wnXRn + return 0 + desc 'check if Pod is started' + set +o xtrace ----------------------------------------------------------------------------------- check if Pod is started ----------------------------------------------------------------------------------- + wait_for_running version-service-major-rs0 3 + local name=version-service-major-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 
1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=version-service-major ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod version-service-major-rs0-0 + local pod=version-service-major-rs0-0 + set +o xtrace waiting for pod/version-service-major-rs0-0 to be ready....................OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod version-service-major-rs0-1 + local pod=version-service-major-rs0-1 + set +o xtrace waiting for pod/version-service-major-rs0-1 to be ready.......................OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb version-service-major -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7GZI54J5jV +++ mktemp ++ local LAST_ERR=/tmp/tmp.UtnZ9RlPBe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb version-service-major -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7GZI54J5jV ++ cat /tmp/tmp.UtnZ9RlPBe ++ rm /tmp/tmp.7GZI54J5jV /tmp/tmp.UtnZ9RlPBe ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod version-service-major-rs0-2 + local pod=version-service-major-rs0-2 + set +o xtrace waiting for pod/version-service-major-rs0-2 to be ready........................OK ++ kubectl_bin get psmdb version-service-major -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3T6nh2BISS +++ mktemp ++ local LAST_ERR=/tmp/tmp.2oAZbHKRet ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb version-service-major -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3T6nh2BISS ++ cat /tmp/tmp.2oAZbHKRet ++ rm /tmp/tmp.3T6nh2BISS /tmp/tmp.2oAZbHKRet ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb version-service-major -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IT2IqAH0BW +++ mktemp ++ local LAST_ERR=/tmp/tmp.3v59rd2Qdx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb version-service-major -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.IT2IqAH0BW ++ cat /tmp/tmp.3v59rd2Qdx ++ rm /tmp/tmp.IT2IqAH0BW /tmp/tmp.3v59rd2Qdx ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + sleep 20 + desc 'check if statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/version-service-major-rs0 + local resource=statefulset/version-service-major-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-major-rs0.yml + local new_result=/tmp/tmp.QfOIVStdwq/statefulset_version-service-major-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-major-rs0-oc.yml ']' + kubectl_bin get -o yaml 
statefulset/version-service-major-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("version-service-22351", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.xyT6n4Olau ++ mktemp + local LAST_ERR=/tmp/tmp.gEb0f9GA8F + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/version-service-major-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xyT6n4Olau + cat /tmp/tmp.gEb0f9GA8F + rm /tmp/tmp.xyT6n4Olau /tmp/tmp.gEb0f9GA8F + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-major-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-major-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-major-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-major-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-major-rs0.yml /tmp/tmp.QfOIVStdwq/statefulset_version-service-major-rs0.yml + log 'compare_kubectl: statefulset/version-service-major-rs0 OK' + set +o xtrace [2025-12-10T15:22:32+0000] compare_kubectl: statefulset/version-service-major-rs0 OK + desc 'create user myApp' + set +o xtrace ----------------------------------------------------------------------------------- create user myApp ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' userAdmin:userAdmin123456@version-service-major-rs0.version-service-22351 + local 'command=db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' + local uri=userAdmin:userAdmin123456@version-service-major-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1PALbGgw8o +++ mktemp ++ local LAST_ERR=/tmp/tmp.AGxqfH3DoP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1PALbGgw8o ++ cat /tmp/tmp.AGxqfH3DoP ++ rm /tmp/tmp.1PALbGgw8o /tmp/tmp.AGxqfH3DoP ++ return 0 + local client_container=psmdb-client-696897d69b-h6bsz + local mongo_flag= + [[ userAdmin:userAdmin123456@version-service-major-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@version-service-major-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.124lWSnXs2 ++ mktemp + local LAST_ERR=/tmp/tmp.OcyLJOCnXC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo 
mongodb+srv://userAdmin:userAdmin123456@version-service-major-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.124lWSnXs2 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://version-service-major-rs0-1.version-service-major-rs0.version-service-22351.svc.cluster.local:27017,version-service-major-rs0-2.version-service-major-rs0.version-service-22351.svc.cluster.local:27017,version-service-major-rs0-0.version-service-major-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("e65e1d73-710a-47b1-bb66-79a8df42fb1a") } Percona Server for MongoDB server version: v6.0.4-3 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.OcyLJOCnXC + rm /tmp/tmp.124lWSnXs2 /tmp/tmp.OcyLJOCnXC + return 0 + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@version-service-major-rs0.version-service-22351 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@version-service-major-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.upTMIT0iUX +++ mktemp ++ local LAST_ERR=/tmp/tmp.LbUl7dlqzx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.upTMIT0iUX ++ cat /tmp/tmp.LbUl7dlqzx ++ rm /tmp/tmp.upTMIT0iUX /tmp/tmp.LbUl7dlqzx ++ return 0 + local client_container=psmdb-client-696897d69b-h6bsz + local mongo_flag= + [[ myApp:myPass@version-service-major-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@version-service-major-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.O3BBDiVj4F ++ mktemp + local LAST_ERR=/tmp/tmp.ahs4NFQmOR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@version-service-major-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.O3BBDiVj4F Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://version-service-major-rs0-0.version-service-major-rs0.version-service-22351.svc.cluster.local:27017,version-service-major-rs0-1.version-service-major-rs0.version-service-22351.svc.cluster.local:27017,version-service-major-rs0-2.version-service-major-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit 
session: session { "id" : UUID("e28c595e-61de-4c14-a038-494ecc4a48c6") } Percona Server for MongoDB server version: v6.0.4-3 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.ahs4NFQmOR + rm /tmp/tmp.O3BBDiVj4F /tmp/tmp.ahs4NFQmOR + return 0 + compare_kubectl statefulset/version-service-major-rs0 + local resource=statefulset/version-service-major-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-major-rs0.yml + local new_result=/tmp/tmp.QfOIVStdwq/statefulset_version-service-major-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-major-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/version-service-major-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("version-service-22351", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.l7VN8eYlZy ++ mktemp + local LAST_ERR=/tmp/tmp.bHYhaI4LqZ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/version-service-major-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.l7VN8eYlZy + cat /tmp/tmp.bHYhaI4LqZ + rm /tmp/tmp.l7VN8eYlZy /tmp/tmp.bHYhaI4LqZ + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-major-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-major-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-major-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-major-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-major-rs0.yml /tmp/tmp.QfOIVStdwq/statefulset_version-service-major-rs0.yml + log 'compare_kubectl: statefulset/version-service-major-rs0 OK' + set +o xtrace [2025-12-10T15:22:36+0000] compare_kubectl: statefulset/version-service-major-rs0 OK + pods=($(kubectl get pods -l app.kubernetes.io/name=percona-server-mongodb -o=name)) ++ kubectl get pods -l app.kubernetes.io/name=percona-server-mongodb -o=name + '[' 3 -eq 0 ']' + for pod in "${pods[@]}" ++ kubectl get pod/version-service-major-rs0-0 -o 'jsonpath={.spec.containers[0].image}' + img=percona/percona-server-mongodb:6.0.4-3 + '[' percona/percona-server-mongodb:6.0.4-3 '!=' percona/percona-server-mongodb:6.0.4-3 ']' + for pod in "${pods[@]}" ++ kubectl get pod/version-service-major-rs0-1 -o 'jsonpath={.spec.containers[0].image}' + img=percona/percona-server-mongodb:6.0.4-3 + '[' percona/percona-server-mongodb:6.0.4-3 '!=' percona/percona-server-mongodb:6.0.4-3 ']' + for pod in "${pods[@]}" ++ kubectl get pod/version-service-major-rs0-2 -o 'jsonpath={.spec.containers[0].image}' + img=percona/percona-server-mongodb:6.0.4-3 + '[' percona/percona-server-mongodb:6.0.4-3 '!=' percona/percona-server-mongodb:6.0.4-3 ']' + kubectl_bin delete psmdb --all ++ mktemp + local LAST_OUT=/tmp/tmp.EZZNSRZMd6 ++ mktemp + local LAST_ERR=/tmp/tmp.Rq9iG32wvs + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EZZNSRZMd6 perconaservermongodb.psmdb.percona.com "version-service-major" deleted from version-service-22351 namespace + cat /tmp/tmp.Rq9iG32wvs + rm /tmp/tmp.EZZNSRZMd6 /tmp/tmp.Rq9iG32wvs + return 0 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.I5Zapa6oPx +++ mktemp ++ local LAST_ERR=/tmp/tmp.gfBnwbNXav ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.I5Zapa6oPx ++ cat /tmp/tmp.gfBnwbNXav ++ rm /tmp/tmp.I5Zapa6oPx /tmp/tmp.gfBnwbNXav ++ return 0 + kubectl delete pod -n psmdb-operator 
percona-server-mongodb-operator-79456db78d-h7qp2 pod "percona-server-mongodb-operator-79456db78d-h7qp2" deleted from psmdb-operator namespace + sleep 10 + for i in "${!cases[@]}" + desc 'test version-service-unreachable' + set +o xtrace ----------------------------------------------------------------------------------- test version-service-unreachable ----------------------------------------------------------------------------------- + cluster=version-service-unreachable + expected_image=perconalab/percona-server-mongodb-operator:main-mongod8.0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.un4pYnZsdf ++ mktemp + local LAST_ERR=/tmp/tmp.ubQBgheI4c + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/secrets.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.un4pYnZsdf secret/some-users configured deployment.apps/psmdb-client unchanged + cat /tmp/tmp.ubQBgheI4c Warning: resource secrets/some-users is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. + rm /tmp/tmp.un4pYnZsdf /tmp/tmp.ubQBgheI4c + return 0 + desc 'create PSMDB cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PSMDB cluster ----------------------------------------------------------------------------------- ++ mktemp + tmp_file=/tmp/tmp.FX3K3gIGaW + sed s%#initImage%perconalab/percona-server-mongodb-operator:PR-2044-c13d0759%g /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/conf/version-service-unreachable-rs0.yml + desc 'create first PSMDB cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster ----------------------------------------------------------------------------------- + yq eval ' .spec.backup.enabled = false | del(.spec.backup.tasks) | .spec.image = "perconalab/percona-server-mongodb-operator:main-mongod8.0" | .spec.pmm.image = "percona/pmm-client:2.44.1-1" | .spec.backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' /tmp/tmp.FX3K3gIGaW + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.5O90BzYin8 ++ mktemp + local LAST_ERR=/tmp/tmp.NSNMn9H1q7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.5O90BzYin8 perconaservermongodb.psmdb.percona.com/version-service-unreachable created + cat /tmp/tmp.NSNMn9H1q7 + rm /tmp/tmp.5O90BzYin8 /tmp/tmp.NSNMn9H1q7 + return 0 + desc 'check if Pod is started' + set +o xtrace ----------------------------------------------------------------------------------- check if Pod is started ----------------------------------------------------------------------------------- + wait_for_running version-service-unreachable-rs0 3 + local name=version-service-unreachable-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] 
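Two shell helpers recur throughout this trace and are worth reading once in isolation. First, every kubectl call goes through a kubectl_bin wrapper: it captures stdout and stderr into mktemp files (the /tmp/tmp.* pairs above), retries the command up to three times with a growing pause on failure, then prints the captured output, removes the temp files, and returns the exit status. The following is a minimal sketch reconstructed from the xtrace output only; the real helper in the suite's shared test functions may differ in details such as the exact failure test and sleep policy.

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 timeout=4 i
        LAST_OUT=$(mktemp)          # captured stdout (the /tmp/tmp.* files in the trace)
        LAST_ERR=$(mktemp)          # captured stderr
        for i in $(seq 0 2); do     # up to three attempts
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" -ne 0 ]; then
                cat "$LAST_OUT" "$LAST_ERR"   # show what the failed attempt printed
                sleep $((timeout * i))        # back-off seen in the trace: 0s, then 4s, then 8s
            else
                break
            fi
        done
        cat "$LAST_OUT" "$LAST_ERR"
        rm -f "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

Second, compare_kubectl is the check behind each "compare_kubectl: ... OK" line: it dumps the live object as YAML, strips every field that varies between runs (uids, timestamps, images, hashes, node ports, status), masks the generated namespace, and diffs the result against a golden file. A condensed sketch under the same caveat; $test_dir, $tmp_dir, and $namespace are assumed variable names, and the full yq filter in the trace deletes many more fields than shown here:

    compare_kubectl() {
        local resource=$1                               # e.g. statefulset/version-service-major-rs0
        local expected=$test_dir/compare/${resource//\//_}.yml
        local actual=$tmp_dir/${resource//\//_}.yml
        # Drop run-specific fields, then normalize the namespace so the
        # output is stable enough to diff against the golden file.
        kubectl get -o yaml "$resource" \
            | yq eval '
                del(.metadata.managedFields)
              | del(.. | select(has("uid")).uid)
              | del(.status)
              | (.. | select(tag == "!!str")) |= sub("'"$namespace"'", "NAME_SPACE")
              ' - >"$actual"
        diff -u "$expected" "$actual"
    }

After normalization the trace also applies a few version-gated yq -i del(...) edits (for example dropping .spec.persistentVolumeClaimRetentionPolicy on newer Kubernetes) before running diff -u, so a test case only fails when the operator actually rendered an unexpected spec.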
+ set -o xtrace + local rs_name=rs0 + local cluster_name=version-service-unreachable ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod version-service-unreachable-rs0-0 + local pod=version-service-unreachable-rs0-0 + set +o xtrace waiting for pod/version-service-unreachable-rs0-0 to be ready............OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod version-service-unreachable-rs0-1 + local pod=version-service-unreachable-rs0-1 + set +o xtrace waiting for pod/version-service-unreachable-rs0-1 to be ready................OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb version-service-unreachable -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.USrfMsLF2G +++ mktemp ++ local LAST_ERR=/tmp/tmp.OgFpix0IjX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb version-service-unreachable -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.USrfMsLF2G ++ cat /tmp/tmp.OgFpix0IjX ++ rm /tmp/tmp.USrfMsLF2G /tmp/tmp.OgFpix0IjX ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod version-service-unreachable-rs0-2 + local pod=version-service-unreachable-rs0-2 + set +o xtrace waiting for pod/version-service-unreachable-rs0-2 to be ready..............OK ++ kubectl_bin get psmdb version-service-unreachable -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SowAhaLXNL +++ mktemp ++ local LAST_ERR=/tmp/tmp.OTPhqKD7za ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb version-service-unreachable -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].nonvoting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.SowAhaLXNL ++ cat /tmp/tmp.OTPhqKD7za ++ rm /tmp/tmp.SowAhaLXNL /tmp/tmp.OTPhqKD7za ++ return 0 + [[ '' == \t\r\u\e ]] ++ kubectl_bin get psmdb version-service-unreachable -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OrCYLoLsaZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xz2QAXh1AP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb version-service-unreachable -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].hidden.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.OrCYLoLsaZ ++ cat /tmp/tmp.Xz2QAXh1AP ++ rm /tmp/tmp.OrCYLoLsaZ /tmp/tmp.Xz2QAXh1AP ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + sleep 20 + desc 'check if statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/version-service-unreachable-rs0 + local resource=statefulset/version-service-unreachable-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-unreachable-rs0.yml + local new_result=/tmp/tmp.QfOIVStdwq/statefulset_version-service-unreachable-rs0.yml + '[' -n '' -a -f 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-unreachable-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/version-service-unreachable-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("version-service-22351", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. | select(. 
== "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.K9yiMm8QSs ++ mktemp + local LAST_ERR=/tmp/tmp.Ihm0CLZTav + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/version-service-unreachable-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.K9yiMm8QSs + cat /tmp/tmp.Ihm0CLZTav + rm /tmp/tmp.K9yiMm8QSs /tmp/tmp.Ihm0CLZTav + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-unreachable-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-unreachable-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-unreachable-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-unreachable-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-unreachable-rs0.yml /tmp/tmp.QfOIVStdwq/statefulset_version-service-unreachable-rs0.yml + log 'compare_kubectl: statefulset/version-service-unreachable-rs0 OK' + set +o xtrace [2025-12-10T15:24:42+0000] compare_kubectl: statefulset/version-service-unreachable-rs0 OK + desc 'create user myApp' + set +o xtrace ----------------------------------------------------------------------------------- create user myApp ----------------------------------------------------------------------------------- + run_mongo 'db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' userAdmin:userAdmin123456@version-service-unreachable-rs0.version-service-22351 + local 'command=db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})' + local uri=userAdmin:userAdmin123456@version-service-unreachable-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fopUNBPRKp +++ mktemp ++ local LAST_ERR=/tmp/tmp.q49dQokMHT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.fopUNBPRKp ++ cat /tmp/tmp.q49dQokMHT ++ rm /tmp/tmp.fopUNBPRKp /tmp/tmp.q49dQokMHT ++ return 0 + local client_container=psmdb-client-696897d69b-h6bsz + local mongo_flag= + [[ userAdmin:userAdmin123456@version-service-unreachable-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | mongo mongodb+srv://userAdmin:userAdmin123456@version-service-unreachable-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.zcVtCdnbkY ++ mktemp + local LAST_ERR=/tmp/tmp.kdXSXK9U2H + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''db.createUser({user: "myApp", pwd: "myPass", roles: [{ db: "myApp", role: "readWrite" }]})\n'\'' | 
mongo mongodb+srv://userAdmin:userAdmin123456@version-service-unreachable-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zcVtCdnbkY Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://version-service-unreachable-rs0-0.version-service-unreachable-rs0.version-service-22351.svc.cluster.local:27017,version-service-unreachable-rs0-1.version-service-unreachable-rs0.version-service-22351.svc.cluster.local:27017,version-service-unreachable-rs0-2.version-service-unreachable-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("053177d8-fe14-4100-80de-88a7ae3ae86f") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.kdXSXK9U2H + rm /tmp/tmp.zcVtCdnbkY /tmp/tmp.kdXSXK9U2H + return 0 + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongo 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@version-service-unreachable-rs0.version-service-22351 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@version-service-unreachable-rs0.version-service-22351 + local driver=mongodb+srv + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZyAILtTi9G +++ mktemp ++ local LAST_ERR=/tmp/tmp.yNQ6BmZpYk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.ZyAILtTi9G ++ cat /tmp/tmp.yNQ6BmZpYk ++ rm /tmp/tmp.ZyAILtTi9G /tmp/tmp.yNQ6BmZpYk ++ return 0 + local client_container=psmdb-client-696897d69b-h6bsz + local mongo_flag= + [[ myApp:myPass@version-service-unreachable-rs0.version-service-22351 == *cfg* ]] + replica_set=rs0 + kubectl_bin exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@version-service-unreachable-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' ++ mktemp + local LAST_OUT=/tmp/tmp.Qo7HL9Ih4M ++ mktemp + local LAST_ERR=/tmp/tmp.TEVF7GIjxd + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl exec psmdb-client-696897d69b-h6bsz -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb+srv://myApp:myPass@version-service-unreachable-rs0.version-service-22351.svc.cluster.local/admin?ssl=false\&replicaSet=rs0 ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Qo7HL9Ih4M Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://version-service-unreachable-rs0-2.version-service-unreachable-rs0.version-service-22351.svc.cluster.local:27017,version-service-unreachable-rs0-0.version-service-unreachable-rs0.version-service-22351.svc.cluster.local:27017,version-service-unreachable-rs0-1.version-service-unreachable-rs0.version-service-22351.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb&replicaSet=rs0&ssl=false Implicit session: session { "id" : UUID("5918cdb7-ee61-42f0-97f2-510c942e4e9a") } Percona Server for MongoDB server version: v8.0.16-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.TEVF7GIjxd + rm /tmp/tmp.Qo7HL9Ih4M /tmp/tmp.TEVF7GIjxd + return 0 + compare_kubectl statefulset/version-service-unreachable-rs0 + local resource=statefulset/version-service-unreachable-rs0 + local postfix= + local skip_generation_check= + local expected_result=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-unreachable-rs0.yml + local new_result=/tmp/tmp.QfOIVStdwq/statefulset_version-service-unreachable-rs0.yml + '[' -n '' -a -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-unreachable-rs0-oc.yml ']' + kubectl_bin get -o yaml statefulset/version-service-unreachable-rs0 + yq eval ' del(.metadata.ownerReferences[].apiVersion) | del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "NAMESPACE")) | del(.metadata.selfLink) | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.status) | (.. | select(tag == "!!str")) |= sub("version-service-22351", "NAME_SPACE") | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "extensions/v1beta1")) = "apps/v1" | (.. 
| select(. == "batch/v1beta1")) = "batch/v1" ' - ++ mktemp + local LAST_OUT=/tmp/tmp.KMfmef2OD9 ++ mktemp + local LAST_ERR=/tmp/tmp.AQrx9Lifx4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get -o yaml statefulset/version-service-unreachable-rs0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KMfmef2OD9 + cat /tmp/tmp.AQrx9Lifx4 + rm /tmp/tmp.KMfmef2OD9 /tmp/tmp.AQrx9Lifx4 + return 0 + yq -i eval 'del(.spec.persistentVolumeClaimRetentionPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-unreachable-rs0.yml + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + yq -i eval 'del(.spec.internalTrafficPolicy)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-unreachable-rs0.yml + yq -i eval 'del(.spec.allocateLoadBalancerNodePorts)' /tmp/tmp.QfOIVStdwq/statefulset_version-service-unreachable-rs0.yml + [[ /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-unreachable-rs0.yml == */cronjob* ]] + '[' -n '' ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/e2e-tests/version-service/compare/statefulset_version-service-unreachable-rs0.yml /tmp/tmp.QfOIVStdwq/statefulset_version-service-unreachable-rs0.yml + log 'compare_kubectl: statefulset/version-service-unreachable-rs0 OK' + set +o xtrace [2025-12-10T15:24:45+0000] compare_kubectl: statefulset/version-service-unreachable-rs0 OK + pods=($(kubectl get pods -l app.kubernetes.io/name=percona-server-mongodb -o=name)) ++ kubectl get pods -l app.kubernetes.io/name=percona-server-mongodb -o=name + '[' 3 -eq 0 ']' + for pod in "${pods[@]}" ++ kubectl get pod/version-service-unreachable-rs0-0 -o 'jsonpath={.spec.containers[0].image}' + img=perconalab/percona-server-mongodb-operator:main-mongod8.0 + '[' perconalab/percona-server-mongodb-operator:main-mongod8.0 '!=' perconalab/percona-server-mongodb-operator:main-mongod8.0 ']' + for pod in "${pods[@]}" ++ kubectl get pod/version-service-unreachable-rs0-1 -o 'jsonpath={.spec.containers[0].image}' + img=perconalab/percona-server-mongodb-operator:main-mongod8.0 + '[' perconalab/percona-server-mongodb-operator:main-mongod8.0 '!=' perconalab/percona-server-mongodb-operator:main-mongod8.0 ']' + for pod in "${pods[@]}" ++ kubectl get pod/version-service-unreachable-rs0-2 -o 'jsonpath={.spec.containers[0].image}' + img=perconalab/percona-server-mongodb-operator:main-mongod8.0 + '[' perconalab/percona-server-mongodb-operator:main-mongod8.0 '!=' perconalab/percona-server-mongodb-operator:main-mongod8.0 ']' + kubectl_bin delete psmdb --all ++ mktemp + local LAST_OUT=/tmp/tmp.lIL3Nd1qm6 ++ mktemp + local LAST_ERR=/tmp/tmp.JNuVOSXHad + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete psmdb --all + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.lIL3Nd1qm6 perconaservermongodb.psmdb.percona.com "version-service-unreachable" deleted from version-service-22351 namespace + cat /tmp/tmp.JNuVOSXHad + rm /tmp/tmp.lIL3Nd1qm6 /tmp/tmp.JNuVOSXHad + return 0 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.xEpAGue4jn +++ mktemp ++ local LAST_ERR=/tmp/tmp.JEteYFoi5V ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 
'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xEpAGue4jn ++ cat /tmp/tmp.JEteYFoi5V ++ rm /tmp/tmp.xEpAGue4jn /tmp/tmp.JEteYFoi5V ++ return 0 + kubectl delete pod -n psmdb-operator percona-server-mongodb-operator-79456db78d-mbbkw pod "percona-server-mongodb-operator-79456db78d-mbbkw" deleted from psmdb-operator namespace + sleep 10 + destroy version-service-22351 + local namespace=version-service-22351 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_backups + desc 'Delete psmdb-backup' + set +o xtrace ----------------------------------------------------------------------------------- Delete psmdb-backup ----------------------------------------------------------------------------------- ++ kubectl_bin get psmdb-backup --no-headers ++ wc -l +++ mktemp ++ local LAST_OUT=/tmp/tmp.a0CRC7woJB +++ mktemp ++ local LAST_ERR=/tmp/tmp.7RqbtDC3Hh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb-backup --no-headers ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.a0CRC7woJB ++ cat /tmp/tmp.7RqbtDC3Hh No resources found in version-service-22351 namespace. ++ rm /tmp/tmp.a0CRC7woJB /tmp/tmp.7RqbtDC3Hh ++ return 0 + '[' 0 '!=' 0 ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.aJWw3c85XK ++ mktemp + local LAST_ERR=/tmp/tmp.P5r0Gdyzrq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.aJWw3c85XK customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.P5r0Gdyzrq + rm /tmp/tmp.aJWw3c85XK /tmp/tmp.P5r0Gdyzrq + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type 
"perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.iFwkyrQzU5 ++ mktemp + local LAST_ERR=/tmp/tmp.AthL7pzODT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.iFwkyrQzU5 + cat /tmp/tmp.AthL7pzODT + rm /tmp/tmp.iFwkyrQzU5 /tmp/tmp.AthL7pzODT + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.fVPFAVoIVg ++ mktemp + local LAST_ERR=/tmp/tmp.oCNJNJk94t + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fVPFAVoIVg + cat /tmp/tmp.oCNJNJk94t + rm /tmp/tmp.fVPFAVoIVg /tmp/tmp.oCNJNJk94t + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' No resources found + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: resource(s) were provided, but no name was specified + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.OMhKZOpFwB ++ mktemp + local LAST_ERR=/tmp/tmp.RCouTBw1UN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.OMhKZOpFwB + cat /tmp/tmp.RCouTBw1UN + rm /tmp/tmp.OMhKZOpFwB /tmp/tmp.RCouTBw1UN + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.539QipaxiM ++ mktemp + local LAST_ERR=/tmp/tmp.C6GCc5Wc0p + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2044/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.539QipaxiM clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.C6GCc5Wc0p + rm /tmp/tmp.539QipaxiM /tmp/tmp.C6GCc5Wc0p + return 0 + destroy_cert_manager + kubectl_bin 
delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.a9xRNkWKft ++ mktemp + local LAST_ERR=/tmp/tmp.MyzGqYAZu4 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.a9xRNkWKft + cat /tmp/tmp.MyzGqYAZu4 Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): 
error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": 
clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from 
server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.a9xRNkWKft + cat /tmp/tmp.MyzGqYAZu4 Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting 
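The `+`-prefixed lines in this log are xtrace output from the suite's kubectl_bin retry helper. Below is a minimal sketch of that helper, reconstructed only from the visible behavior (mktemp capture files, three attempts via seq 0 2, sleeps of 0, 4 and 8 seconds); the exact control flow and names are assumptions, not the suite's actual source.

# Sketch of the retry wrapper implied by the trace above (assumed, not verbatim).
kubectl_bin() {
    local LAST_OUT LAST_ERR
    local exit_status=0
    local timeout=4
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        # stop retrying on success; otherwise show the output and back off
        if [ "$exit_status" -eq 0 ]; then
            break
        fi
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        sleep $((timeout * i))   # linear backoff: 0s, 4s, 8s
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}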
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": 
clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 4 + for i in $(seq 0 2) + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.a9xRNkWKft + cat /tmp/tmp.MyzGqYAZu4 Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not 
+ sleep 8
+ cat /tmp/tmp.a9xRNkWKft
+ cat /tmp/tmp.MyzGqYAZu4
[... NotFound errors identical to the previous attempts omitted ...]
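After the third attempt the helper gives up and returns 1, which the caller then tolerates. Another hypothetical way to avoid the three wasted attempts is to probe for the cert-manager namespace first and only delete when it exists; this guard is a sketch, not part of the suite.

# Hypothetical guard (assumed): skip the delete when cert-manager is absent.
if kubectl get namespace cert-manager >/dev/null 2>&1; then
    kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml
fi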
serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": 
clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-tokenrequest" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.19.1/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-tokenrequest" not 
+ rm /tmp/tmp.a9xRNkWKft /tmp/tmp.MyzGqYAZu4
+ return 1
+ true
+ '[' -n '' ']'
+ '[' -n psmdb-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace version-service-22351
+ rm -rf /tmp/tmp.QfOIVStdwq
+ kubectl_bin delete --grace-period=0 --force=true namespace psmdb-operator
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.gQlhKAacLy
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
++ mktemp
+ local LAST_OUT=/tmp/tmp.IVGXfAcChZ
+ local LAST_ERR=/tmp/tmp.p3GZ1LEX4E
+ local exit_status=0
+ local timeout=4
++ mktemp
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.2yDfmgv7zw
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace version-service-22351
+ for i in $(seq 0 2)
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace psmdb-operator
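The interleaved trace above shows the teardown force-deleting the two test namespaces in parallel. Here is a sketch of that cleanup with a hypothetical verification step appended; the loop form, the --timeout value, and the use of --ignore-not-found are assumptions layered on top of what the log shows.

# Sketch of the final cleanup (assumed), with verification added:
# force-delete the test namespaces in parallel, then block until both are gone.
for ns in version-service-22351 psmdb-operator; do
    kubectl delete namespace "$ns" --grace-period=0 --force=true --ignore-not-found &
done
wait
kubectl wait --for=delete namespace/version-service-22351 --timeout=120s || true
kubectl wait --for=delete namespace/psmdb-operator --timeout=120s || true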