Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/e2e-tests/logs/upgrade-consistency-sharded-tls.log Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 + CLUSTER=some-name + main + create_infra upgrade-consistency-sharded-tls-25468 + local ns=upgrade-consistency-sharded-tls-25468 + [[ 1 == 1 ]] + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.DfkTvcSj1B ++ mktemp + local LAST_ERR=/tmp/tmp.QtJlR1Kc31 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DfkTvcSj1B customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.QtJlR1Kc31 + rm /tmp/tmp.DfkTvcSj1B /tmp/tmp.QtJlR1Kc31 + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/deploy/crd.yaml ++ grep -v '\-\-\-' grep: warning: stray \ before - grep: warning: stray \ before - + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.EsKlbtUpZy ++ mktemp + local LAST_ERR=/tmp/tmp.3EurBLi7Fh + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EsKlbtUpZy + cat /tmp/tmp.3EurBLi7Fh + rm /tmp/tmp.EsKlbtUpZy /tmp/tmp.3EurBLi7Fh + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n 
sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.SNDkMqQIH1 ++ mktemp + local LAST_ERR=/tmp/tmp.8jL8BOO2XG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SNDkMqQIH1 + cat /tmp/tmp.8jL8BOO2XG + rm /tmp/tmp.SNDkMqQIH1 /tmp/tmp.8jL8BOO2XG + return 0 + for crd_name in $(yq eval '.metadata.name' "${src_dir}/deploy/crd.yaml" | grep -v '\-\-\-') + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch perconaservermongodbs.psmdb.percona.com -n upgrade-consistency-sharded-tls-9589 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaservermongodb.psmdb.percona.com/some-name patched + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.8YOVXUBc5V ++ mktemp + local LAST_ERR=/tmp/tmp.VfzHVrvgmm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8YOVXUBc5V customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com condition met + cat /tmp/tmp.VfzHVrvgmm + rm /tmp/tmp.8YOVXUBc5V /tmp/tmp.VfzHVrvgmm + return 0 + local rbac_yaml=rbac.yaml + '[' -n psmdb-operator ']' + rbac_yaml=cw-rbac.yaml + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/deploy/cw-rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.uO9U0D6IvK ++ mktemp + local LAST_ERR=/tmp/tmp.xOfH1ERvSv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/deploy/cw-rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.uO9U0D6IvK clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.xOfH1ERvSv + rm /tmp/tmp.uO9U0D6IvK /tmp/tmp.xOfH1ERvSv + return 0 + check_crd_for_deletion PR-2232-bb80a94f + local git_tag=PR-2232-bb80a94f ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-2232-bb80a94f/deploy/crd.yaml ++ yq eval .metadata.name ++ /usr/sbin/sed s/---//g ++ /usr/sbin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in $(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '.metadata.name' | $sed 's/---//g' | $sed ':a;N;$!ba;s/\n/ /g') ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zdCfhA7qGg +++ mktemp ++ local LAST_ERR=/tmp/tmp.2FHy2febrx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.zdCfhA7qGg ++ cat /tmp/tmp.2FHy2febrx Error from server 
(NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.zdCfhA7qGg ++ cat /tmp/tmp.2FHy2febrx Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.zdCfhA7qGg ++ cat /tmp/tmp.2FHy2febrx Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.zdCfhA7qGg ++ cat /tmp/tmp.2FHy2febrx Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.zdCfhA7qGg /tmp/tmp.2FHy2febrx ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n psmdb-operator ']' + create_namespace psmdb-operator + local namespace=psmdb-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ grep chaos-mesh ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' ++ mktemp + desc 'cleaned up old namespaces psmdb-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace psmdb-operator --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.EG1SgvonRR ++ mktemp + local 
LAST_OUT=/tmp/tmp.VkTopMDjnr + local LAST_ERR=/tmp/tmp.XNBDw9L5CS + local exit_status=0 + local timeout=4 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.USZ4QBWYrA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + for i in $(seq 0 2) + set +e + kubectl delete namespace psmdb-operator --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EG1SgvonRR + cat /tmp/tmp.XNBDw9L5CS + rm /tmp/tmp.EG1SgvonRR /tmp/tmp.XNBDw9L5CS + return 0 namespace "cert-manager" deleted namespace "upgrade-consistency-sharded-tls-9589" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VkTopMDjnr namespace "psmdb-operator" deleted + cat /tmp/tmp.USZ4QBWYrA + rm /tmp/tmp.VkTopMDjnr /tmp/tmp.USZ4QBWYrA + return 0 + kubectl_bin wait --for=delete namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.xtRLsxXD0s ++ mktemp + local LAST_ERR=/tmp/tmp.pkbNwfCv7x + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xtRLsxXD0s + cat /tmp/tmp.pkbNwfCv7x + rm /tmp/tmp.xtRLsxXD0s /tmp/tmp.pkbNwfCv7x + return 0 + desc 'create namespace psmdb-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.0obiq1yEZR ++ mktemp + local LAST_ERR=/tmp/tmp.a7G3ee3dO2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0obiq1yEZR namespace/psmdb-operator created + cat /tmp/tmp.a7G3ee3dO2 + rm /tmp/tmp.0obiq1yEZR /tmp/tmp.a7G3ee3dO2 + return 0 + set_kube_ctx psmdb-operator + local namespace=psmdb-operator ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.uhID2wSlIf +++ mktemp ++ local LAST_ERR=/tmp/tmp.mjUFOhLmyf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.uhID2wSlIf ++ cat /tmp/tmp.mjUFOhLmyf ++ rm /tmp/tmp.uhID2wSlIf /tmp/tmp.mjUFOhLmyf ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2232-bb80a94f-13-cluster4 --namespace=psmdb-operator ++ mktemp + local LAST_OUT=/tmp/tmp.0ph4y9Lgf2 ++ mktemp + local LAST_ERR=/tmp/tmp.TUyrufZZ5A + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2232-bb80a94f-13-cluster4 --namespace=psmdb-operator + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0ph4y9Lgf2 Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2232-bb80a94f-13-cluster4" modified. 
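Note: nearly every kubectl call in this trace goes through the suite's kubectl_bin helper, which is why the log is dominated by mktemp, LAST_OUT/LAST_ERR, exit_status and seq 0 2 lines. The helper captures stdout/stderr to temp files and retries a failing command with a growing delay (the sleep 0 / sleep 4 / sleep 8 sequence visible in the crd/null lookup above). A minimal sketch of that pattern, reconstructed from the trace rather than copied from the e2e helpers:

# Retry wrapper as it appears in the trace (sketch, not the verbatim helper).
kubectl_bin() {
    local LAST_OUT LAST_ERR
    local exit_status=0
    local timeout=4
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" -eq 0 ]; then
            break
        fi
        sleep $((timeout * i))   # 0s, 4s, 8s between attempts, as seen above
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}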
+ cat /tmp/tmp.TUyrufZZ5A + rm /tmp/tmp.0ph4y9Lgf2 /tmp/tmp.TUyrufZZ5A + return 0 + deploy_operator + desc 'start PSMDB operator: docker.io/perconalab/percona-server-mongodb-operator:PR-2232-bb80a94f' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator: docker.io/perconalab/percona-server-mongodb-operator:PR-2232-bb80a94f ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/e2e-tests/upgrade-consistency-sharded-tls/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.vw2HA9kPpD ++ mktemp + local LAST_ERR=/tmp/tmp.GMnKdPvziC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vw2HA9kPpD customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.GMnKdPvziC + rm /tmp/tmp.vw2HA9kPpD /tmp/tmp.GMnKdPvziC + return 0 + '[' -n psmdb-operator ']' + apply_rbac cw-rbac + local operator_namespace=psmdb-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: psmdb-operator^' + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.8rUNgtKm65 ++ mktemp + local LAST_ERR=/tmp/tmp.jM5v4L9y9f + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.8rUNgtKm65 clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.jM5v4L9y9f + rm /tmp/tmp.8rUNgtKm65 /tmp/tmp.jM5v4L9y9f + return 0 + yq eval ' (.spec.template.spec.containers[].image = "docker.io/perconalab/percona-server-mongodb-operator:PR-2232-bb80a94f") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/deploy/cw-operator.yaml + kubectl_bin apply -n psmdb-operator -f - ++ mktemp + local LAST_OUT=/tmp/tmp.4l0nwKmBD8 ++ mktemp + local LAST_ERR=/tmp/tmp.bVvCZjR9dH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -n psmdb-operator -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.4l0nwKmBD8 deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.bVvCZjR9dH + rm /tmp/tmp.4l0nwKmBD8 /tmp/tmp.bVvCZjR9dH + return 0 + sleep 20 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.cO3OxaUnWO +++ mktemp ++ local LAST_ERR=/tmp/tmp.D4aoSvCX2X ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.cO3OxaUnWO ++ cat /tmp/tmp.D4aoSvCX2X ++ rm /tmp/tmp.cO3OxaUnWO /tmp/tmp.D4aoSvCX2X ++ return 0 + wait_operator_pod percona-server-mongodb-operator-c9c7475c6-q2hpd + local pod=percona-server-mongodb-operator-c9c7475c6-q2hpd + set +o xtrace waiting for pod/percona-server-mongodb-operator-c9c7475c6-q2hpd to be ready.OK + echo 'Print operator info from log' Print operator info from log + grep 'Manager starting up' ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.PPw39MNnAx +++ mktemp ++ local LAST_ERR=/tmp/tmp.0ibJTCZ08Q ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PPw39MNnAx ++ cat /tmp/tmp.0ibJTCZ08Q ++ rm /tmp/tmp.PPw39MNnAx /tmp/tmp.0ibJTCZ08Q ++ return 0 + kubectl_bin logs -n psmdb-operator percona-server-mongodb-operator-c9c7475c6-q2hpd ++ mktemp + local LAST_OUT=/tmp/tmp.GU4BfUKgYO ++ mktemp + local LAST_ERR=/tmp/tmp.VILPXwaras + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl logs -n psmdb-operator percona-server-mongodb-operator-c9c7475c6-q2hpd + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.GU4BfUKgYO + cat /tmp/tmp.VILPXwaras + rm /tmp/tmp.GU4BfUKgYO /tmp/tmp.VILPXwaras + return 0 2026-03-11T18:52:53.518Z INFO setup Manager starting up {"gitCommit": "bb80a94fe259eaaaf83e80d96a720e6fe7708ede", "gitBranch": "PR-2232-bb80a94f", "buildTime": "", "goVersion": "go1.25.8", "os": "linux", "arch": "amd64"} + create_namespace upgrade-consistency-sharded-tls-25468 + local namespace=upgrade-consistency-sharded-tls-25468 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration 
++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|psmdb-operator|openshift|^gke-|^gmp-|^NAME' ++ mktemp + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces upgrade-consistency-sharded-tls-25468' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces upgrade-consistency-sharded-tls-25468 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace upgrade-consistency-sharded-tls-25468 --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.DLaL0uzGTA ++ mktemp + local LAST_OUT=/tmp/tmp.7rC69jvV0m ++ mktemp + local LAST_ERR=/tmp/tmp.gFIb49JuUi + local exit_status=0 + local timeout=4 + local LAST_ERR=/tmp/tmp.kbGeMyJwII + local exit_status=0 + local timeout=4 ++ seq 0 2 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl get ns + for i in $(seq 0 2) + set +e + kubectl delete namespace upgrade-consistency-sharded-tls-25468 --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DLaL0uzGTA + cat /tmp/tmp.kbGeMyJwII + rm /tmp/tmp.DLaL0uzGTA /tmp/tmp.kbGeMyJwII + return 0 error: resource(s) were provided, but no name was specified + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.7rC69jvV0m + cat /tmp/tmp.gFIb49JuUi + rm /tmp/tmp.7rC69jvV0m /tmp/tmp.gFIb49JuUi + return 0 + kubectl_bin wait --for=delete namespace upgrade-consistency-sharded-tls-25468 ++ mktemp + local LAST_OUT=/tmp/tmp.xZI2u5DoHi ++ mktemp + local LAST_ERR=/tmp/tmp.nnSWDJz2Vb + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl wait --for=delete namespace upgrade-consistency-sharded-tls-25468 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xZI2u5DoHi + cat /tmp/tmp.nnSWDJz2Vb + rm /tmp/tmp.xZI2u5DoHi /tmp/tmp.nnSWDJz2Vb + return 0 + desc 'create namespace upgrade-consistency-sharded-tls-25468' + set +o xtrace ----------------------------------------------------------------------------------- create namespace upgrade-consistency-sharded-tls-25468 
----------------------------------------------------------------------------------- + kubectl_bin create namespace upgrade-consistency-sharded-tls-25468 ++ mktemp + local LAST_OUT=/tmp/tmp.tkdO17Ee8H ++ mktemp + local LAST_ERR=/tmp/tmp.U3ktg7pJgP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace upgrade-consistency-sharded-tls-25468 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tkdO17Ee8H namespace/upgrade-consistency-sharded-tls-25468 created + cat /tmp/tmp.U3ktg7pJgP + rm /tmp/tmp.tkdO17Ee8H /tmp/tmp.U3ktg7pJgP + return 0 + set_kube_ctx upgrade-consistency-sharded-tls-25468 + local namespace=upgrade-consistency-sharded-tls-25468 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.yO8li6qZLk +++ mktemp ++ local LAST_ERR=/tmp/tmp.qSfJ9h8fQW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yO8li6qZLk ++ cat /tmp/tmp.qSfJ9h8fQW ++ rm /tmp/tmp.yO8li6qZLk /tmp/tmp.qSfJ9h8fQW ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2232-bb80a94f-13-cluster4 --namespace=upgrade-consistency-sharded-tls-25468 ++ mktemp + local LAST_OUT=/tmp/tmp.0Y62TtMtbv ++ mktemp + local LAST_ERR=/tmp/tmp.tQWgWy9tzI + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-2232-bb80a94f-13-cluster4 --namespace=upgrade-consistency-sharded-tls-25468 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.0Y62TtMtbv Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-2232-bb80a94f-13-cluster4" modified. 
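Note: the operator roll-out earlier in this trace (after the old CRDs and RBAC were removed) reduces to four steps: a server-side apply of the CRDs, the cluster-wide RBAC with its namespace rewritten to psmdb-operator, the cw-operator.yaml deployment with the image and two env vars patched via yq, and a check that the resulting pod logs "Manager starting up". A condensed sketch of that sequence, with the commands taken from the trace and the retry/temp-file plumbing stripped out:

# Operator deployment sequence from the trace (condensed sketch; paths relative to the repo checkout).
kubectl apply --server-side --force-conflicts -f deploy/crd.yaml

sed -e 's^namespace: .*^namespace: psmdb-operator^' deploy/cw-rbac.yaml \
    | kubectl apply -n psmdb-operator -f -

yq eval '
  (.spec.template.spec.containers[].image = "docker.io/perconalab/percona-server-mongodb-operator:PR-2232-bb80a94f") |
  ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") |
  ((.. | select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' deploy/cw-operator.yaml \
    | kubectl apply -n psmdb-operator -f -

pod=$(kubectl get pods --selector=name=percona-server-mongodb-operator \
    -o 'jsonpath={.items[].metadata.name}' -n psmdb-operator)
kubectl logs -n psmdb-operator "$pod" | grep 'Manager starting up'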
+ cat /tmp/tmp.tQWgWy9tzI + rm /tmp/tmp.0Y62TtMtbv /tmp/tmp.tQWgWy9tzI + return 0 + deploy_cert_manager + desc 'deploy cert manager' + set +o xtrace ----------------------------------------------------------------------------------- deploy cert manager ----------------------------------------------------------------------------------- + kubectl_bin create namespace cert-manager ++ mktemp + local LAST_OUT=/tmp/tmp.mqq580kSBn ++ mktemp + local LAST_ERR=/tmp/tmp.MArxkSvD2T + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl create namespace cert-manager + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.mqq580kSBn namespace/cert-manager created + cat /tmp/tmp.MArxkSvD2T + rm /tmp/tmp.mqq580kSBn /tmp/tmp.MArxkSvD2T + return 0 + kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true ++ mktemp + local LAST_OUT=/tmp/tmp.rYCs1lSpUA ++ mktemp + local LAST_ERR=/tmp/tmp.iFTikf0wM7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.rYCs1lSpUA namespace/cert-manager labeled + cat /tmp/tmp.iFTikf0wM7 + rm /tmp/tmp.rYCs1lSpUA /tmp/tmp.iFTikf0wM7 + return 0 + kubectl_bin apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml --validate=false ++ mktemp + local LAST_OUT=/tmp/tmp.Cg6byoplEx ++ mktemp + local LAST_ERR=/tmp/tmp.vlqm4bSU9U + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml --validate=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Cg6byoplEx namespace/cert-manager configured customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged serviceaccount/cert-manager-cainjector created serviceaccount/cert-manager created serviceaccount/cert-manager-webhook created clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged 
clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager-tokenrequest created role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager-tokenrequest created rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created service/cert-manager-cainjector created service/cert-manager created service/cert-manager-webhook created deployment.apps/cert-manager-cainjector created deployment.apps/cert-manager created deployment.apps/cert-manager-webhook created mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured + cat /tmp/tmp.vlqm4bSU9U Warning: resource namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
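Note: the cert-manager bootstrap above is a plain manifest install of v1.19.3, with webhook validation disabled on the namespace (and --validate=false on the apply) so the resources can be created before cert-manager's own webhook is serving; the trace then waits for the cert-manager pods to report Ready. A condensed sketch of those commands, minus the retry wrapper:

# cert-manager bootstrap as run in this test (sketch; version pinned to v1.19.3 here).
kubectl create namespace cert-manager
kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true
kubectl apply --validate=false \
    -f https://github.com/cert-manager/cert-manager/releases/download/v1.19.3/cert-manager.yaml
kubectl -n cert-manager wait pod \
    -l app.kubernetes.io/instance=cert-manager --for=condition=ready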
+ rm /tmp/tmp.Cg6byoplEx /tmp/tmp.vlqm4bSU9U + return 0 + kubectl_bin -n cert-manager wait pod -l app.kubernetes.io/instance=cert-manager --for=condition=ready ++ mktemp + local LAST_OUT=/tmp/tmp.3IIAJF0au9 ++ mktemp + local LAST_ERR=/tmp/tmp.2Azfxt6ByB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl -n cert-manager wait pod -l app.kubernetes.io/instance=cert-manager --for=condition=ready + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3IIAJF0au9 pod/cert-manager-559d798845-qp769 condition met pod/cert-manager-cainjector-64958d9c7c-rl2th condition met pod/cert-manager-webhook-7fb6f99b56-bxbkw condition met + cat /tmp/tmp.2Azfxt6ByB + rm /tmp/tmp.3IIAJF0au9 /tmp/tmp.2Azfxt6ByB + return 0 + sleep 120 + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.EN94lfKS4g ++ mktemp + local LAST_ERR=/tmp/tmp.j4CkPa5WBF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.EN94lfKS4g secret/some-users created + cat /tmp/tmp.j4CkPa5WBF + rm /tmp/tmp.EN94lfKS4g /tmp/tmp.j4CkPa5WBF + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/e2e-tests/conf/client_with_tls.yml ++ mktemp + local LAST_OUT=/tmp/tmp.gSJQQjg4BP ++ mktemp + local LAST_ERR=/tmp/tmp.hf6G6U7cEM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/e2e-tests/conf/client_with_tls.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gSJQQjg4BP deployment.apps/psmdb-client created + cat /tmp/tmp.hf6G6U7cEM + rm /tmp/tmp.gSJQQjg4BP /tmp/tmp.hf6G6U7cEM + return 0 + deploy_cmctl + local service_account=cmctl + /usr/sbin/sed -e s/percona-server-mongodb-operator/cmctl/g /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/deploy/rbac.yaml + yq '(select(.rules).rules[] | select(contains({"apiGroups": ["cert-manager.io"]}))).resources += "certificates/status"' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.QFNMRYBGb8 ++ mktemp + local LAST_ERR=/tmp/tmp.vxFIXXoTub + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.QFNMRYBGb8 role.rbac.authorization.k8s.io/cmctl created serviceaccount/cmctl created rolebinding.rbac.authorization.k8s.io/service-account-cmctl created + cat /tmp/tmp.vxFIXXoTub + rm /tmp/tmp.QFNMRYBGb8 /tmp/tmp.vxFIXXoTub + return 0 + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/e2e-tests/conf/cmctl.yml ++ mktemp + local LAST_OUT=/tmp/tmp.fygcGAvCWw ++ mktemp + local LAST_ERR=/tmp/tmp.Yva4qHqkZv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/e2e-tests/conf/cmctl.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fygcGAvCWw deployment.apps/cmctl created + cat /tmp/tmp.Yva4qHqkZv + rm 
/tmp/tmp.fygcGAvCWw /tmp/tmp.Yva4qHqkZv + return 0 + desc 'create first PSMDB cluster 1.21.2 some-name' + set +o xtrace ----------------------------------------------------------------------------------- create first PSMDB cluster 1.21.2 some-name ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/e2e-tests/upgrade-consistency-sharded-tls/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/e2e-tests/upgrade-consistency-sharded-tls/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-2232/e2e-tests/upgrade-consistency-sharded-tls/conf/some-name.yml + yq eval '(.spec | select(.image == null)).image = "docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0"' ++ mktemp + yq eval '(.spec | select(has("pmm"))).pmm.image = "docker.io/percona/pmm-client:2.44.1-1"' + yq eval '(.spec | select(has("initImage"))).initImage = "docker.io/perconalab/percona-server-mongodb-operator:PR-2232-bb80a94f"' + local LAST_OUT=/tmp/tmp.9OxPcIi9fP + yq eval '(.spec | select(has("backup"))).backup.image = "docker.io/perconalab/percona-server-mongodb-operator:main-backup"' + /usr/sbin/sed -e s/NAME_SPACE/upgrade-consistency-sharded-tls-25468/g ++ mktemp + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_ERR=/tmp/tmp.ntC4X8hyjE + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in $(seq 0 2) + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9OxPcIi9fP perconaservermongodb.psmdb.percona.com/some-name created + cat /tmp/tmp.ntC4X8hyjE + rm /tmp/tmp.9OxPcIi9fP /tmp/tmp.ntC4X8hyjE + return 0 + desc 'check if Pod started' + set +o xtrace ----------------------------------------------------------------------------------- check if Pod started ----------------------------------------------------------------------------------- + wait_cluster + wait_for_running some-name-rs0 3 + local name=some-name-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=some-name ++ seq 0 2 + for i in $(seq 0 $last_pod) + [[ 0 -eq 2 ]] + wait_pod some-name-rs0-0 + local pod=some-name-rs0-0 + set +o xtrace waiting for pod/some-name-rs0-0 to be ready.............OK + for i in $(seq 0 $last_pod) + [[ 1 -eq 2 ]] + wait_pod some-name-rs0-1 + local pod=some-name-rs0-1 + set +o xtrace waiting for pod/some-name-rs0-1 to be ready...............OK + for i in $(seq 0 $last_pod) + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nW7DbgkCik +++ mktemp ++ local LAST_ERR=/tmp/tmp.UZzeUAMbYY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in $(seq 0 2) ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.nW7DbgkCik ++ cat /tmp/tmp.UZzeUAMbYY ++ rm /tmp/tmp.nW7DbgkCik /tmp/tmp.UZzeUAMbYY ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod some-name-rs0-2 + local pod=some-name-rs0-2 + set +o xtrace waiting for pod/some-name-rs0-2 to be ready.............Terminated
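Note: apply_cluster in the final block does not submit the CR file verbatim; it rewrites the image fields for the mongod, init, backup and PMM containers, pins spec.upgradeOptions.apply to Never, and substitutes the test namespace before piping the result to kubectl. A condensed sketch of that chain reconstructed from the trace (the xtrace output interleaves the pipeline stages, so the exact stage ordering below is an assumption):

# apply_cluster substitution chain for this run (sketch; images/namespace as seen in the trace).
cat e2e-tests/upgrade-consistency-sharded-tls/conf/some-name.yml \
    | yq eval '(.spec | select(.image == null)).image = "docker.io/perconalab/percona-server-mongodb-operator:main-mongod8.0"' \
    | yq eval '(.spec | select(has("initImage"))).initImage = "docker.io/perconalab/percona-server-mongodb-operator:PR-2232-bb80a94f"' \
    | yq eval '(.spec | select(has("backup"))).backup.image = "docker.io/perconalab/percona-server-mongodb-operator:main-backup"' \
    | yq eval '(.spec | select(has("pmm"))).pmm.image = "docker.io/percona/pmm-client:2.44.1-1"' \
    | yq eval '.spec.upgradeOptions.apply="Never"' \
    | sed -e 's/NAME_SPACE/upgrade-consistency-sharded-tls-25468/g' \
    | kubectl apply -f -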