Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/logs/cross-site-sharded.log
WARNING: version difference between client (1.31) and server (1.27) exceeds the supported minor version skew of +/-1
WARNING: version difference between client (1.31) and server (1.27) exceeds the supported minor version skew of +/-1
WARNING: version difference between client (1.31) and server (1.27) exceeds the supported minor version skew of +/-1
++ get_mongod_ver_from_image perconalab/percona-server-mongodb-operator:main-mongod7.0
++ local image=perconalab/percona-server-mongodb-operator:main-mongod7.0
+++ run_simple_cli_inside_image perconalab/percona-server-mongodb-operator:main-mongod7.0 'mongod --version'
+++ local image=perconalab/percona-server-mongodb-operator:main-mongod7.0
+++ local 'cli=mongod --version'
+++ local pod_name=8637
+++ /usr/bin/sed -r 's/^.*db version v(([0-9]+\.){2}[0-9]+-[0-9]+).*$/\1/g'
+++ kubectl_bin -n default run 8637 --image=perconalab/percona-server-mongodb-operator:main-mongod7.0 --restart=Never --command -- sleep infinity
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.OBOVZAC32Y
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.PyfNkeC7hs
+++ local exit_status=0
+++ local timeout=4
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl -n default run 8637 --image=perconalab/percona-server-mongodb-operator:main-mongod7.0 --restart=Never --command -- sleep infinity
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 -a -n 1 ']'
+++ break
+++ cat /tmp/tmp.OBOVZAC32Y
+++ cat /tmp/tmp.PyfNkeC7hs
+++ rm /tmp/tmp.OBOVZAC32Y /tmp/tmp.PyfNkeC7hs
+++ return 0
+++ kubectl_bin -n default wait --for=condition=Ready pod/8637
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.03YQNzy9nq
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.Ue2GwkvVUi
+++ local exit_status=0
+++ local timeout=4
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl -n default wait --for=condition=Ready pod/8637
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 -a -n 1 ']'
+++ break
+++ cat /tmp/tmp.03YQNzy9nq
+++ cat /tmp/tmp.Ue2GwkvVUi
+++ rm /tmp/tmp.03YQNzy9nq /tmp/tmp.Ue2GwkvVUi
+++ return 0
++++ kubectl_bin -n default exec 8637 -- mongod --version
+++++ mktemp
++++ local LAST_OUT=/tmp/tmp.4JTpuQ29WB
+++++ mktemp
++++ local LAST_ERR=/tmp/tmp.aTefn9XRbY
++++ local exit_status=0
++++ local timeout=4
+++++ seq 0 2
++++ for i in '$(seq 0 2)'
++++ set +e
++++ kubectl -n default exec 8637 -- mongod --version
++++ exit_status=0
++++ set -e
++++ '[' 0 '!=' 0 -a -n 1 ']'
++++ break
++++ cat /tmp/tmp.4JTpuQ29WB
++++ cat /tmp/tmp.aTefn9XRbY
++++ rm /tmp/tmp.4JTpuQ29WB /tmp/tmp.aTefn9XRbY
++++ return 0
+++ local 'output=db version v7.0.14-8 Build Info: { "version": "7.0.14-8", "gitVersion": "4479fd75bea6bf98a934c29007352dd36f3e1db5", "openSSLVersion": "OpenSSL 1.1.1k FIPS 25 Mar 2021", "modules": [], "proFeatures": [], "allocator": "tcmalloc", "environment": { "distarch": "x86_64", "target_arch": "x86_64" } }'
+++ kubectl_bin -n default delete pod/8637 --grace-period=0 --force
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.7gLM9bnzcS
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.yE5VVpVHS4
+++ local exit_status=0
+++ local timeout=4
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl -n default delete pod/8637 --grace-period=0 --force
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 -a -n 1 ']'
+++ break
+++ cat /tmp/tmp.7gLM9bnzcS
+++ cat /tmp/tmp.yE5VVpVHS4
Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
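
[Editor's note] The block above (its cleanup continues just below) implements get_mongod_ver_from_image: run a throwaway pod from the mongod image, exec mongod --version inside it, extract the "db version vX.Y.Z-N" token with sed, then force-delete the pod. A minimal stand-alone sketch of the same idea; the pod name "ver-probe" and variable names are illustrative, not the harness's own:

# Probe the mongod version baked into an image (sketch; harness uses a random pod name like 8637).
image=perconalab/percona-server-mongodb-operator:main-mongod7.0
pod=ver-probe

kubectl -n default run "$pod" --image="$image" --restart=Never --command -- sleep infinity
kubectl -n default wait --for=condition=Ready "pod/$pod"

# "db version v7.0.14-8" -> "7.0.14-8" (same sed expression as in the trace)
full_ver=$(kubectl -n default exec "$pod" -- mongod --version \
    | sed -r 's/^.*db version v(([0-9]+\.){2}[0-9]+-[0-9]+).*$/\1/g' \
    | head -n1)
mongo_ver=${full_ver%.*}   # 7.0.14-8 -> 7.0, mirroring FULL_VER / MONGO_VER below

kubectl -n default delete "pod/$pod" --grace-period=0 --force
echo "$full_ver / $mongo_ver"
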
+++ rm /tmp/tmp.7gLM9bnzcS /tmp/tmp.yE5VVpVHS4 +++ return 0 +++ echo db version v7.0.14-8 Build Info: '{' '"version":' '"7.0.14-8",' '"gitVersion":' '"4479fd75bea6bf98a934c29007352dd36f3e1db5",' '"openSSLVersion":' '"OpenSSL' 1.1.1k FIPS 25 Mar '2021",' '"modules":' '[],' '"proFeatures":' '[],' '"allocator":' '"tcmalloc",' '"environment":' '{' '"distarch":' '"x86_64",' '"target_arch":' '"x86_64"' '}' '}' ++ version_info=7.0.14-8 ++ [[ ! 7.0.14-8 =~ ^([0-9]+\.){2}[0-9]+-[0-9]+$ ]] ++ echo 7.0.14-8 + FULL_VER=7.0.14-8 + MONGO_VER=7.0 + unset OPERATOR_NS + main_cluster=cross-site-sharded-main + replica_cluster=cross-site-sharded-replica + desc 'create main cluster' + set +o xtrace ----------------------------------------------------------------------------------- create main cluster ----------------------------------------------------------------------------------- + create_infra cross-site-sharded-30230 + local ns=cross-site-sharded-30230 + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.piHmuCvmfo ++ mktemp + local LAST_ERR=/tmp/tmp.prhz9QhsBN + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.piHmuCvmfo customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.prhz9QhsBN + rm /tmp/tmp.piHmuCvmfo /tmp/tmp.prhz9QhsBN + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.hU1N1j5BJk ++ mktemp + local LAST_ERR=/tmp/tmp.g53f6RF1YB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.hU1N1j5BJk + cat /tmp/tmp.g53f6RF1YB + rm /tmp/tmp.hU1N1j5BJk /tmp/tmp.g53f6RF1YB + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch 
perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.ZatXHebqnB ++ mktemp + local LAST_ERR=/tmp/tmp.0d24ICouMj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.ZatXHebqnB + cat /tmp/tmp.0d24ICouMj + rm /tmp/tmp.ZatXHebqnB /tmp/tmp.0d24ICouMj + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.TQr8g2ZrXH ++ mktemp + local LAST_ERR=/tmp/tmp.3jJyeISWYm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TQr8g2ZrXH + cat /tmp/tmp.3jJyeISWYm + rm /tmp/tmp.TQr8g2ZrXH /tmp/tmp.3jJyeISWYm + return 0 + local rbac_yaml=rbac.yaml + '[' -n '' ']' + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.KD1ke9yOzj ++ mktemp + local LAST_ERR=/tmp/tmp.OjSibYcmYF + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KD1ke9yOzj role.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted serviceaccount "percona-server-mongodb-operator" deleted rolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.OjSibYcmYF + rm /tmp/tmp.KD1ke9yOzj /tmp/tmp.OjSibYcmYF + return 0 + check_crd_for_deletion PR-1608-f10c3c44 + local git_tag=PR-1608-f10c3c44 ++ curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/PR-1608-f10c3c44/deploy/crd.yaml ++ yq eval .metadata.name ++ /usr/bin/sed s/---//g ++ /usr/bin/sed ':a;N;$!ba;s/\n/ /g' + for crd_name in '$(curl -s https://raw.githubusercontent.com/percona/percona-server-mongodb-operator/${git_tag}/deploy/crd.yaml | yq eval '\''.metadata.name'\'' | $sed '\''s/---//g'\'' | $sed '\'':a;N;$!ba;s/\n/ /g'\'')' ++ kubectl_bin get crd/null -o 'jsonpath={.status.conditions[-1].type}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ClYAEQ1zUv +++ mktemp ++ 
local LAST_ERR=/tmp/tmp.v492VoD35L ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.ClYAEQ1zUv ++ cat /tmp/tmp.v492VoD35L Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 0 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.ClYAEQ1zUv ++ cat /tmp/tmp.v492VoD35L Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 4 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get crd/null -o 'jsonpath={.status.conditions[-1].type}' ++ exit_status=1 ++ set -e ++ '[' 1 '!=' 0 -a -n 1 ']' ++ cat /tmp/tmp.ClYAEQ1zUv ++ cat /tmp/tmp.v492VoD35L Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ sleep 8 ++ cat /tmp/tmp.ClYAEQ1zUv ++ cat /tmp/tmp.v492VoD35L Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ++ rm /tmp/tmp.ClYAEQ1zUv /tmp/tmp.v492VoD35L ++ return 1 + [[ '' == \T\e\r\m\i\n\a\t\i\n\g ]] + '[' -n '' ']' + create_namespace cross-site-sharded-30230 + local namespace=cross-site-sharded-30230 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ sed s/NAMESPACE// ++ tail -n1 ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + desc 'destroy chaos-mesh' + set +o xtrace ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- + '[' -n '' ']' ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get MutatingWebhookConfiguration + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default|Terminating|psmdb-operator|openshift|gke-mcs|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces cross-site-sharded-30230' + set +o xtrace ----------------------------------------------------------------------------------- 
cleaned up old namespaces cross-site-sharded-30230 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace cross-site-sharded-30230 --ignore-not-found + xargs kubectl delete ns + awk '{print$1}' ++ mktemp + kubectl_bin get ns ++ mktemp + local LAST_OUT=/tmp/tmp.9TeeTB1K1W ++ mktemp + local LAST_OUT=/tmp/tmp.KveyXloIIi + local LAST_ERR=/tmp/tmp.JGGXc6cwBQ + local exit_status=0 + local timeout=4 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.wFDlaIwZvT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace cross-site-sharded-30230 --ignore-not-found + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.KveyXloIIi + cat /tmp/tmp.wFDlaIwZvT + rm /tmp/tmp.KveyXloIIi /tmp/tmp.wFDlaIwZvT + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9TeeTB1K1W + cat /tmp/tmp.JGGXc6cwBQ + rm /tmp/tmp.9TeeTB1K1W /tmp/tmp.JGGXc6cwBQ + return 0 + kubectl_bin wait --for=delete namespace cross-site-sharded-30230 ++ mktemp + local LAST_OUT=/tmp/tmp.fjoJUcO8Uh ++ mktemp + local LAST_ERR=/tmp/tmp.sDbbb9hMrq + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace cross-site-sharded-30230 namespace "cross-site-sharded-6646" deleted namespace "cross-site-sharded-replica-9573" deleted namespace "gmp-public" deleted namespace "gmp-system" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fjoJUcO8Uh + cat /tmp/tmp.sDbbb9hMrq + rm /tmp/tmp.fjoJUcO8Uh /tmp/tmp.sDbbb9hMrq + return 0 + desc 'create namespace cross-site-sharded-30230' + set +o xtrace ----------------------------------------------------------------------------------- create namespace cross-site-sharded-30230 ----------------------------------------------------------------------------------- + kubectl_bin create namespace cross-site-sharded-30230 ++ mktemp + local LAST_OUT=/tmp/tmp.xlZ85R9GZW ++ mktemp + local LAST_ERR=/tmp/tmp.X0FLVoad6b + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace cross-site-sharded-30230 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xlZ85R9GZW namespace/cross-site-sharded-30230 created + cat /tmp/tmp.X0FLVoad6b + rm /tmp/tmp.xlZ85R9GZW /tmp/tmp.X0FLVoad6b + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.eENbKJhfLb +++ mktemp ++ local LAST_ERR=/tmp/tmp.cgVHsgV8qz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.eENbKJhfLb ++ cat /tmp/tmp.cgVHsgV8qz ++ rm /tmp/tmp.eENbKJhfLb /tmp/tmp.cgVHsgV8qz ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7 --namespace=cross-site-sharded-30230 ++ mktemp + local LAST_OUT=/tmp/tmp.6DPwq4iWWZ ++ mktemp + local LAST_ERR=/tmp/tmp.r6mhMrerRm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7 --namespace=cross-site-sharded-30230 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6DPwq4iWWZ Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7" modified. 
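
[Editor's note] Most of the mktemp / LAST_OUT / LAST_ERR lines in this log come from the harness's kubectl_bin wrapper, which captures each kubectl call's output into temp files and retries up to three times with a growing back-off. The trace is consistent with roughly the following shape; this is an approximate reconstruction inferred from the log, not the harness's exact code:

# Approximate reconstruction of the kubectl_bin retry wrapper seen throughout this log.
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 timeout=4 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            # Failed attempt: show what happened, back off 0s/4s/8s, then retry.
            cat "$LAST_OUT"
            cat "$LAST_ERR"
            sleep $((timeout * i))
        else
            break
        fi
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}
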
+ cat /tmp/tmp.r6mhMrerRm + rm /tmp/tmp.6DPwq4iWWZ /tmp/tmp.r6mhMrerRm + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.vBllph7d0d ++ mktemp + local LAST_ERR=/tmp/tmp.kgGQ4Esr8e + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.vBllph7d0d customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.kgGQ4Esr8e + rm /tmp/tmp.vBllph7d0d /tmp/tmp.kgGQ4Esr8e + return 0 + '[' -n '' ']' + apply_rbac rbac + local operator_namespace=psmdb-operator + local rbac=rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/rbac.yaml + kubectl_bin apply -f - + sed -e 's^namespace: .*^namespace: psmdb-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.akxtMNoHXg ++ mktemp + local LAST_ERR=/tmp/tmp.7vHM6PaOUG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.akxtMNoHXg role.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created rolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.7vHM6PaOUG + rm /tmp/tmp.akxtMNoHXg /tmp/tmp.7vHM6PaOUG + return 0 + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1608-f10c3c44") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/operator.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.BywX4c8LNg ++ mktemp + local LAST_ERR=/tmp/tmp.0Nuj0hSjY2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.BywX4c8LNg deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.0Nuj0hSjY2 + rm /tmp/tmp.BywX4c8LNg /tmp/tmp.0Nuj0hSjY2 + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AG1MxBe4uz +++ mktemp ++ local LAST_ERR=/tmp/tmp.sNp06Dr6fj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AG1MxBe4uz ++ cat /tmp/tmp.sNp06Dr6fj ++ rm /tmp/tmp.AG1MxBe4uz /tmp/tmp.sNp06Dr6fj ++ return 0 + wait_pod percona-server-mongodb-operator-f5b9b89df-gqdmp + local pod=percona-server-mongodb-operator-f5b9b89df-gqdmp + set +o xtrace waiting for pod/percona-server-mongodb-operator-f5b9b89df-gqdmp to be ready.OK + desc 'create secrets and start client' + set +o xtrace ----------------------------------------------------------------------------------- create secrets and start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.xKvk4u5mlW ++ mktemp + local LAST_ERR=/tmp/tmp.Gza043zMom + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/conf/client.yml -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.xKvk4u5mlW deployment.apps/psmdb-client created secret/cross-site-sharded-main-secrets created secret/cross-site-sharded-main-ssl created secret/cross-site-sharded-main-ssl-internal created + cat /tmp/tmp.Gza043zMom + rm /tmp/tmp.xKvk4u5mlW /tmp/tmp.Gza043zMom + return 0 + desc 'create main PSMDB cluster cross-site-sharded-main.' + set +o xtrace ----------------------------------------------------------------------------------- create main PSMDB cluster cross-site-sharded-main. 
----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/conf/cross-site-sharded-main.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/conf/cross-site-sharded-main.yml ++ mktemp + local LAST_OUT=/tmp/tmp.i2YLHPCeUI + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/conf/cross-site-sharded-main.yml + yq eval '.spec.upgradeOptions.apply="Never"' ++ mktemp + local LAST_ERR=/tmp/tmp.laFJ7hy2Uo + local exit_status=0 + local timeout=4 + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1608-f10c3c44"' + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.i2YLHPCeUI perconaservermongodb.psmdb.percona.com/cross-site-sharded-main created + cat /tmp/tmp.laFJ7hy2Uo + rm /tmp/tmp.i2YLHPCeUI /tmp/tmp.laFJ7hy2Uo + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running cross-site-sharded-main-rs0 3 + local name=cross-site-sharded-main-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=cross-site-sharded-main ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod cross-site-sharded-main-rs0-0 + local pod=cross-site-sharded-main-rs0-0 + set +o xtrace waiting for pod/cross-site-sharded-main-rs0-0 to be ready.............OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod cross-site-sharded-main-rs0-1 + local pod=cross-site-sharded-main-rs0-1 + set +o xtrace waiting for pod/cross-site-sharded-main-rs0-1 to be ready............OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb cross-site-sharded-main -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.i7pOfBF3K8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.0yiqjv4Wg5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-main -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.i7pOfBF3K8 ++ cat /tmp/tmp.0yiqjv4Wg5 ++ rm /tmp/tmp.i7pOfBF3K8 /tmp/tmp.0yiqjv4Wg5 ++ return 0 + [[ false == \t\r\u\e ]] + wait_pod cross-site-sharded-main-rs0-2 + local pod=cross-site-sharded-main-rs0-2 + set +o xtrace waiting for pod/cross-site-sharded-main-rs0-2 to be ready............OK ++ kubectl_bin get psmdb cross-site-sharded-main -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h2wL47TIZJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.jUfIFAZMK7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb 
cross-site-sharded-main -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.h2wL47TIZJ ++ cat /tmp/tmp.jUfIFAZMK7 ++ rm /tmp/tmp.h2wL47TIZJ /tmp/tmp.jUfIFAZMK7 ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness.......................................... + wait_for_running cross-site-sharded-main-cfg 3 false + local name=cross-site-sharded-main-cfg + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=cross-site-sharded-main ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod cross-site-sharded-main-cfg-0 + local pod=cross-site-sharded-main-cfg-0 + set +o xtrace waiting for pod/cross-site-sharded-main-cfg-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod cross-site-sharded-main-cfg-1 + local pod=cross-site-sharded-main-cfg-1 + set +o xtrace waiting for pod/cross-site-sharded-main-cfg-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb cross-site-sharded-main -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S1JjEF1Gum +++ mktemp ++ local LAST_ERR=/tmp/tmp.byn6Fnefqr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-main -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.S1JjEF1Gum ++ cat /tmp/tmp.byn6Fnefqr ++ rm /tmp/tmp.S1JjEF1Gum /tmp/tmp.byn6Fnefqr ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod cross-site-sharded-main-cfg-2 + local pod=cross-site-sharded-main-cfg-2 + set +o xtrace waiting for pod/cross-site-sharded-main-cfg-2 to be ready.OK ++ kubectl_bin get psmdb cross-site-sharded-main -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2iRj1XySt3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ngGcjbzppE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-main -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2iRj1XySt3 ++ cat /tmp/tmp.ngGcjbzppE ++ rm /tmp/tmp.2iRj1XySt3 /tmp/tmp.ngGcjbzppE ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ false == \t\r\u\e ]] + desc 'create user' + set +o xtrace ----------------------------------------------------------------------------------- create user ----------------------------------------------------------------------------------- + run_mongos 'db.createUser({user:"user",pwd:"pass",roles:[{db:"app",role:"readWrite"}]})' userAdmin:userAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local 'command=db.createUser({user:"user",pwd:"pass",roles:[{db:"app",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Zf71V6iz3a +++ mktemp ++ local LAST_ERR=/tmp/tmp.LffQV1QXqe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Zf71V6iz3a ++ cat /tmp/tmp.LffQV1QXqe ++ rm /tmp/tmp.Zf71V6iz3a /tmp/tmp.LffQV1QXqe ++ return 0 + local client_container=psmdb-client-6c585f8dbd-525mt + local mongo_flag= + kubectl_bin exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''db.createUser({user:"user",pwd:"pass",roles:[{db:"app",role:"readWrite"}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.VgZKmeLZad ++ mktemp + local LAST_ERR=/tmp/tmp.ySEFfLBTE7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''db.createUser({user:"user",pwd:"pass",roles:[{db:"app",role:"readWrite"}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.VgZKmeLZad Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("bc9749cb-6063-477d-8af5-4f9bac0ac61e") } Percona Server for MongoDB server version: v7.0.14-8 WARNING: shell and server versions do not match Successfully added user: { "user" : "user", "roles" : [ { "db" : "app", "role" : "readWrite" } ] } bye + cat /tmp/tmp.ySEFfLBTE7 + rm /tmp/tmp.VgZKmeLZad /tmp/tmp.ySEFfLBTE7 + return 0 + sleep 2 + desc 'set chunk size to 2 MB' + set +o xtrace ----------------------------------------------------------------------------------- set chunk size to 2 MB ----------------------------------------------------------------------------------- + run_mongos 'use config\n db.settings.save( { _id:"chunksize", value: 2 } )' clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local 'command=use config\n db.settings.save( { _id:"chunksize", value: 2 } )' + local uri=clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xMFnVe3tv4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3pv7DTatDM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xMFnVe3tv4 ++ cat /tmp/tmp.3pv7DTatDM ++ rm /tmp/tmp.xMFnVe3tv4 /tmp/tmp.3pv7DTatDM ++ return 0 + local client_container=psmdb-client-6c585f8dbd-525mt + local mongo_flag= + kubectl_bin exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''use config\n db.settings.save( { _id:"chunksize", value: 2 } )\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.nK3Wv3KQo7 ++ mktemp + local LAST_ERR=/tmp/tmp.d4ttvae3S3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''use config\n db.settings.save( { _id:"chunksize", value: 2 } )\n'\'' | mongo 
mongodb://clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.nK3Wv3KQo7 Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("36b3e074-2050-46e7-a5e4-549ec0213bd3") } Percona Server for MongoDB server version: v7.0.14-8 WARNING: shell and server versions do not match switched to db config WriteResult({ "nMatched" : 0, "nUpserted" : 1, "nModified" : 0, "_id" : "chunksize" }) bye + cat /tmp/tmp.d4ttvae3S3 + rm /tmp/tmp.nK3Wv3KQo7 /tmp/tmp.d4ttvae3S3 + return 0 + sleep 2 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + run_script_mongos /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/data.js user:pass@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local script=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/data.js + local uri=user:pass@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.A9e1s8zAcz +++ mktemp ++ local LAST_ERR=/tmp/tmp.ICgRU6Q9mO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.A9e1s8zAcz ++ cat /tmp/tmp.ICgRU6Q9mO ++ rm /tmp/tmp.A9e1s8zAcz /tmp/tmp.ICgRU6Q9mO ++ return 0 + local client_container=psmdb-client-6c585f8dbd-525mt + local mongo_flag= ++ basename /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/data.js + name=data.js + kubectl_bin cp /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/data.js cross-site-sharded-30230/psmdb-client-6c585f8dbd-525mt:/tmp ++ mktemp + local LAST_OUT=/tmp/tmp.Tw7IfoFZwO ++ mktemp + local LAST_ERR=/tmp/tmp.0Sgyx6uvbj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl cp /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/data.js cross-site-sharded-30230/psmdb-client-6c585f8dbd-525mt:/tmp + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Tw7IfoFZwO + cat /tmp/tmp.0Sgyx6uvbj + rm /tmp/tmp.Tw7IfoFZwO /tmp/tmp.0Sgyx6uvbj + return 0 + kubectl_bin exec psmdb-client-6c585f8dbd-525mt -- bash -c 'mongo mongodb://user:pass@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin /tmp/data.js' ++ mktemp + local LAST_OUT=/tmp/tmp.gc4H0YFF23 ++ mktemp + local LAST_ERR=/tmp/tmp.EUCw17xedg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-525mt -- bash -c 'mongo mongodb://user:pass@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin /tmp/data.js' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gc4H0YFF23 Percona Server for MongoDB shell version v4.4.29-28 connecting to: 
mongodb://cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("ae01e135-1868-4b6a-a7e2-3134318d1f94") } Percona Server for MongoDB server version: v7.0.14-8 WARNING: shell and server versions do not match + cat /tmp/tmp.EUCw17xedg + rm /tmp/tmp.gc4H0YFF23 /tmp/tmp.EUCw17xedg + return 0 + desc 'shard collection' + set +o xtrace ----------------------------------------------------------------------------------- shard collection ----------------------------------------------------------------------------------- + run_mongos 'sh.enableSharding("app")' clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local 'command=sh.enableSharding("app")' + local uri=clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MalnWPLCbI +++ mktemp ++ local LAST_ERR=/tmp/tmp.QgMKXkFWNQ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.MalnWPLCbI ++ cat /tmp/tmp.QgMKXkFWNQ ++ rm /tmp/tmp.MalnWPLCbI /tmp/tmp.QgMKXkFWNQ ++ return 0 + local client_container=psmdb-client-6c585f8dbd-525mt + local mongo_flag= + kubectl_bin exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''sh.enableSharding("app")\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.Pxo7lYzHzI ++ mktemp + local LAST_ERR=/tmp/tmp.meve26uMQa + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''sh.enableSharding("app")\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Pxo7lYzHzI Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("b7cd4b03-92b8-49f7-b28f-b903666e1f9b") } Percona Server for MongoDB server version: v7.0.14-8 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1727359108, 1), "signature" : { "hash" : BinData(0,"Xfgo5M6gB4yMbi3GUa4pceCsVq0="), "keyId" : NumberLong("7418950052674011147") } }, "operationTime" : Timestamp(1727359108, 1) } bye + cat /tmp/tmp.meve26uMQa + rm /tmp/tmp.Pxo7lYzHzI /tmp/tmp.meve26uMQa + return 0 + sleep 2 + run_mongos 'sh.shardCollection("app.city", { _id: 1 } )' clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local 'command=sh.shardCollection("app.city", { _id: 1 } )' + local uri=clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pE9l5QMyAA +++ mktemp 
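
[Editor's note] The sharding setup in this phase reduces to three mongos statements: lowering the balancer chunk size (done a few steps above), enabling sharding on the app database, and sharding app.city on _id (that call and its result continue just below). Stripped of the run_mongos / kubectl exec plumbing, the equivalent plain shell sketch is:

# Same statements run directly against the main mongos (heredoc form is a sketch;
# the harness pipes printf output into mongo instead).
mongo "mongodb://clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin" <<'EOF'
use config
db.settings.save({ _id: "chunksize", value: 2 })   // 2 MB chunks so data.js yields several chunks to balance
sh.enableSharding("app")
sh.shardCollection("app.city", { _id: 1 })
EOF
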
++ local LAST_ERR=/tmp/tmp.NYo84D3rII ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pE9l5QMyAA ++ cat /tmp/tmp.NYo84D3rII ++ rm /tmp/tmp.pE9l5QMyAA /tmp/tmp.NYo84D3rII ++ return 0 + local client_container=psmdb-client-6c585f8dbd-525mt + local mongo_flag= + kubectl_bin exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''sh.shardCollection("app.city", { _id: 1 } )\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.TYWGSVffDM ++ mktemp + local LAST_ERR=/tmp/tmp.R2zlgA1VrS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''sh.shardCollection("app.city", { _id: 1 } )\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TYWGSVffDM Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("16315530-2772-4f0b-9254-5c2fd7355f56") } Percona Server for MongoDB server version: v7.0.14-8 WARNING: shell and server versions do not match { "collectionsharded" : "app.city", "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1727359113, 35), "signature" : { "hash" : BinData(0,"yhuEY/gybcMRn1+b71oQgBFIO3g="), "keyId" : NumberLong("7418950052674011147") } }, "operationTime" : Timestamp(1727359113, 35) } bye + cat /tmp/tmp.R2zlgA1VrS + rm /tmp/tmp.TYWGSVffDM /tmp/tmp.R2zlgA1VrS + return 0 + sleep 120 + desc 'Check chunks' + set +o xtrace ----------------------------------------------------------------------------------- Check chunks ----------------------------------------------------------------------------------- + chunks_param1=ns + chunks_param2='"app.city"' + [[ 7.0 != \4\.\4 ]] + chunks_param1=uuid ++ run_mongos 'use app\n db.getCollectionInfos({ "name": "city" })[0].info.uuid' user:pass@cross-site-sharded-main-mongos.cross-site-sharded-30230 ++ local 'command=use app\n db.getCollectionInfos({ "name": "city" })[0].info.uuid' ++ local uri=user:pass@cross-site-sharded-main-mongos.cross-site-sharded-30230 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ grep 'switched to db app' -A 1 ++ grep -v 'switched to db app' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.uGWqW8p3mZ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.1MVGeFv600 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.uGWqW8p3mZ +++ cat /tmp/tmp.1MVGeFv600 +++ rm /tmp/tmp.uGWqW8p3mZ /tmp/tmp.1MVGeFv600 +++ return 0 ++ local client_container=psmdb-client-6c585f8dbd-525mt ++ local mongo_flag= ++ kubectl_bin exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''use app\n db.getCollectionInfos({ "name": "city" })[0].info.uuid\n'\'' | mongo 
mongodb://user:pass@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tVRnsTOURn +++ mktemp ++ local LAST_ERR=/tmp/tmp.tUYZqBjc07 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''use app\n db.getCollectionInfos({ "name": "city" })[0].info.uuid\n'\'' | mongo mongodb://user:pass@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.tVRnsTOURn ++ cat /tmp/tmp.tUYZqBjc07 ++ rm /tmp/tmp.tVRnsTOURn /tmp/tmp.tUYZqBjc07 ++ return 0 + chunks_param2='UUID("3a62c472-31ba-4119-a2a7-7f40cb5fa092")' + shards=0 + for i in '"rs0"' '"rs1"' ++ run_mongos 'use config\n db.chunks.count({"uuid": UUID("3a62c472-31ba-4119-a2a7-7f40cb5fa092"), "shard": "rs0"})' clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230 ++ grep 'switched to db config' -A 1 ++ local 'command=use config\n db.chunks.count({"uuid": UUID("3a62c472-31ba-4119-a2a7-7f40cb5fa092"), "shard": "rs0"})' ++ local uri=clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ grep -v 'switched to db config' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.gL08evGIhO ++++ mktemp +++ local LAST_ERR=/tmp/tmp.JzzL5vs0cD +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.gL08evGIhO +++ cat /tmp/tmp.JzzL5vs0cD +++ rm /tmp/tmp.gL08evGIhO /tmp/tmp.JzzL5vs0cD +++ return 0 ++ local client_container=psmdb-client-6c585f8dbd-525mt ++ local mongo_flag= ++ kubectl_bin exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''use config\n db.chunks.count({"uuid": UUID("3a62c472-31ba-4119-a2a7-7f40cb5fa092"), "shard": "rs0"})\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rwzovbj20k +++ mktemp ++ local LAST_ERR=/tmp/tmp.fiCDdeV9dG ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''use config\n db.chunks.count({"uuid": UUID("3a62c472-31ba-4119-a2a7-7f40cb5fa092"), "shard": "rs0"})\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.rwzovbj20k ++ cat /tmp/tmp.fiCDdeV9dG ++ rm /tmp/tmp.rwzovbj20k /tmp/tmp.fiCDdeV9dG ++ return 0 + out=3 + desc 'rs0 has 3 chunks' + set +o xtrace ----------------------------------------------------------------------------------- rs0 has 3 chunks ----------------------------------------------------------------------------------- + [[ 3 -ne 0 ]] + (( shards = shards + 1 )) + for i in '"rs0"' '"rs1"' ++ run_mongos 'use config\n db.chunks.count({"uuid": UUID("3a62c472-31ba-4119-a2a7-7f40cb5fa092"), "shard": "rs1"})' clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230 ++ local 'command=use config\n 
db.chunks.count({"uuid": UUID("3a62c472-31ba-4119-a2a7-7f40cb5fa092"), "shard": "rs1"})' ++ local uri=clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230 ++ local driver=mongodb ++ local suffix=.svc.cluster.local ++ grep 'switched to db config' -A 1 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp ++ grep -v 'switched to db config' +++ local LAST_OUT=/tmp/tmp.W4AdFtD4j3 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.h49puUCpTV +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.W4AdFtD4j3 +++ cat /tmp/tmp.h49puUCpTV +++ rm /tmp/tmp.W4AdFtD4j3 /tmp/tmp.h49puUCpTV +++ return 0 ++ local client_container=psmdb-client-6c585f8dbd-525mt ++ local mongo_flag= ++ kubectl_bin exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''use config\n db.chunks.count({"uuid": UUID("3a62c472-31ba-4119-a2a7-7f40cb5fa092"), "shard": "rs1"})\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0aic6qfqdd +++ mktemp ++ local LAST_ERR=/tmp/tmp.y7AFIadczx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''use config\n db.chunks.count({"uuid": UUID("3a62c472-31ba-4119-a2a7-7f40cb5fa092"), "shard": "rs1"})\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.0aic6qfqdd ++ cat /tmp/tmp.y7AFIadczx ++ rm /tmp/tmp.0aic6qfqdd /tmp/tmp.y7AFIadczx ++ return 0 + out=1 + desc 'rs1 has 1 chunks' + set +o xtrace ----------------------------------------------------------------------------------- rs1 has 1 chunks ----------------------------------------------------------------------------------- + [[ 1 -ne 0 ]] + (( shards = shards + 1 )) + [[ 2 -lt 2 ]] + desc 'create replica cluster' + set +o xtrace ----------------------------------------------------------------------------------- create replica cluster ----------------------------------------------------------------------------------- + create_namespace cross-site-sharded-replica-9589 0 + local namespace=cross-site-sharded-replica-9589 + local skip_clean_namespace=0 + [[ 1 == 1 ]] + [[ -z 0 ]] + '[' -n '' ']' + desc 'cleaned up old namespaces cross-site-sharded-replica-9589' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces cross-site-sharded-replica-9589 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace cross-site-sharded-replica-9589 --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.Z3MpKd55qt ++ mktemp + local LAST_ERR=/tmp/tmp.dneXGc3yLQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace cross-site-sharded-replica-9589 --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Z3MpKd55qt + cat /tmp/tmp.dneXGc3yLQ + rm /tmp/tmp.Z3MpKd55qt /tmp/tmp.dneXGc3yLQ + return 0 + kubectl_bin wait --for=delete namespace cross-site-sharded-replica-9589 ++ mktemp 
+ local LAST_OUT=/tmp/tmp.PT4PtKUMRV ++ mktemp + local LAST_ERR=/tmp/tmp.hUod9iatng + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete namespace cross-site-sharded-replica-9589 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PT4PtKUMRV + cat /tmp/tmp.hUod9iatng + rm /tmp/tmp.PT4PtKUMRV /tmp/tmp.hUod9iatng + return 0 + desc 'create namespace cross-site-sharded-replica-9589' + set +o xtrace ----------------------------------------------------------------------------------- create namespace cross-site-sharded-replica-9589 ----------------------------------------------------------------------------------- + kubectl_bin create namespace cross-site-sharded-replica-9589 ++ mktemp + local LAST_OUT=/tmp/tmp.Ocx9JzAkVE ++ mktemp + local LAST_ERR=/tmp/tmp.FHw6hSphSR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace cross-site-sharded-replica-9589 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Ocx9JzAkVE namespace/cross-site-sharded-replica-9589 created + cat /tmp/tmp.FHw6hSphSR + rm /tmp/tmp.Ocx9JzAkVE /tmp/tmp.FHw6hSphSR + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.4fiuJaX6wt +++ mktemp ++ local LAST_ERR=/tmp/tmp.fHICi9Clul ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.4fiuJaX6wt ++ cat /tmp/tmp.fHICi9Clul ++ rm /tmp/tmp.4fiuJaX6wt /tmp/tmp.fHICi9Clul ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7 --namespace=cross-site-sharded-replica-9589 ++ mktemp + local LAST_OUT=/tmp/tmp.TF0EISsrbI ++ mktemp + local LAST_ERR=/tmp/tmp.b3SQa8Ep9k + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7 --namespace=cross-site-sharded-replica-9589 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.TF0EISsrbI Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7" modified. 
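
[Editor's note] As with the main site, the harness does not pass -n on every command; it re-points the current kube context at whichever namespace it is working in, so the replica cluster is created in the same GKE cluster under cross-site-sharded-replica-9589:

# Pattern used above for switching the working namespace of the current context.
kubectl config set-context "$(kubectl config current-context)" --namespace=cross-site-sharded-replica-9589
# From here on, plain kubectl / kubectl_bin calls operate in that namespace until the context is switched again.
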
+ cat /tmp/tmp.b3SQa8Ep9k + rm /tmp/tmp.TF0EISsrbI /tmp/tmp.b3SQa8Ep9k + return 0 + deploy_operator + desc 'start PSMDB operator' + set +o xtrace ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- + local cr_file + '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/conf/crd.yaml ']' + cr_file=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/crd.yaml + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.tdt5NSHHqh ++ mktemp + local LAST_ERR=/tmp/tmp.0kSeF6jDSp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.tdt5NSHHqh customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied + cat /tmp/tmp.0kSeF6jDSp + rm /tmp/tmp.tdt5NSHHqh /tmp/tmp.0kSeF6jDSp + return 0 + '[' -n '' ']' + apply_rbac rbac + local operator_namespace=psmdb-operator + local rbac=rbac + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/rbac.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.gZmj9YjyaL ++ mktemp + local LAST_ERR=/tmp/tmp.4OcI4HXDo5 + local exit_status=0 + local timeout=4 + sed -e 's^namespace: .*^namespace: psmdb-operator^' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.gZmj9YjyaL role.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created rolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created + cat /tmp/tmp.4OcI4HXDo5 + rm /tmp/tmp.gZmj9YjyaL /tmp/tmp.4OcI4HXDo5 + return 0 + kubectl_bin apply -f - + yq eval ' (.spec.template.spec.containers[].image = "perconalab/percona-server-mongodb-operator:PR-1608-f10c3c44") | ((.. | select(.[] == "DISABLE_TELEMETRY")) |= .value="true") | ((.. 
| select(.[] == "LOG_LEVEL")) |= .value="DEBUG")' /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/operator.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.zmJHILtRUz ++ mktemp + local LAST_ERR=/tmp/tmp.rZSC6mFjgo + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zmJHILtRUz deployment.apps/percona-server-mongodb-operator created + cat /tmp/tmp.rZSC6mFjgo + rm /tmp/tmp.zmJHILtRUz /tmp/tmp.rZSC6mFjgo + return 0 + sleep 2 ++ get_operator_pod ++ kubectl_bin get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5sSPW8LFA6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AKCX3d26hC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=percona-server-mongodb-operator -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5sSPW8LFA6 ++ cat /tmp/tmp.AKCX3d26hC ++ rm /tmp/tmp.5sSPW8LFA6 /tmp/tmp.AKCX3d26hC ++ return 0 + wait_pod percona-server-mongodb-operator-f5b9b89df-r4qpl + local pod=percona-server-mongodb-operator-f5b9b89df-r4qpl + set +o xtrace waiting for pod/percona-server-mongodb-operator-f5b9b89df-r4qpl to be ready.OK + desc 'start client' + set +o xtrace ----------------------------------------------------------------------------------- start client ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/conf/client.yml ++ mktemp + local LAST_OUT=/tmp/tmp.9q4k8alSDu ++ mktemp + local LAST_ERR=/tmp/tmp.o9JUha9x0h + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/conf/client.yml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.9q4k8alSDu deployment.apps/psmdb-client created + cat /tmp/tmp.o9JUha9x0h + rm /tmp/tmp.9q4k8alSDu /tmp/tmp.o9JUha9x0h + return 0 + desc 'copy secrets from main to replica namespace and create all of them' + set +o xtrace ----------------------------------------------------------------------------------- copy secrets from main to replica namespace and create all of them ----------------------------------------------------------------------------------- + kubectl get secret cross-site-sharded-main-secrets -o yaml -n cross-site-sharded-30230 + kubectl_bin apply -f - + yq eval ' del(.metadata) | (.metadata.name = "cross-site-sharded-replica-secrets")' - ++ mktemp + local LAST_OUT=/tmp/tmp.fqZ8cjDUrM ++ mktemp + local LAST_ERR=/tmp/tmp.Fzi8DM25jW + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fqZ8cjDUrM secret/cross-site-sharded-replica-secrets created + cat /tmp/tmp.Fzi8DM25jW + rm /tmp/tmp.fqZ8cjDUrM /tmp/tmp.Fzi8DM25jW + return 0 + kubectl_bin get secret cross-site-sharded-main-ssl-internal -o yaml -n cross-site-sharded-30230 + kubectl_bin apply -f - ++ mktemp + yq eval ' del(.metadata) | del(.status) | (.metadata.name = "cross-site-sharded-replica-ssl-internal")' - ++ mktemp + local LAST_OUT=/tmp/tmp.6Vx4w9nebj + local LAST_OUT=/tmp/tmp.fj8zfsLwE9 ++ mktemp + local LAST_ERR=/tmp/tmp.h2gUzw7KPp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in 
'$(seq 0 2)' + set +e + kubectl apply -f - ++ mktemp + local LAST_ERR=/tmp/tmp.akCIEfwsi1 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get secret cross-site-sharded-main-ssl-internal -o yaml -n cross-site-sharded-30230 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.6Vx4w9nebj + cat /tmp/tmp.akCIEfwsi1 + rm /tmp/tmp.6Vx4w9nebj /tmp/tmp.akCIEfwsi1 + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fj8zfsLwE9 secret/cross-site-sharded-replica-ssl-internal created + cat /tmp/tmp.h2gUzw7KPp + rm /tmp/tmp.fj8zfsLwE9 /tmp/tmp.h2gUzw7KPp + return 0 + yq eval ' del(.metadata) | del(.status) | (.metadata.name = "cross-site-sharded-replica-ssl")' - + kubectl_bin get secret cross-site-sharded-main-ssl -o yaml -n cross-site-sharded-30230 ++ mktemp + kubectl_bin apply -f - + local LAST_OUT=/tmp/tmp.YEwi5cyHsM ++ mktemp + local LAST_ERR=/tmp/tmp.fZjlmXOXFt + local exit_status=0 + local timeout=4 ++ mktemp ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get secret cross-site-sharded-main-ssl -o yaml -n cross-site-sharded-30230 + local LAST_OUT=/tmp/tmp.DJiS4pLGeG ++ mktemp + local LAST_ERR=/tmp/tmp.g3YN2pMd7R + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.YEwi5cyHsM + cat /tmp/tmp.fZjlmXOXFt + rm /tmp/tmp.YEwi5cyHsM /tmp/tmp.fZjlmXOXFt + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.DJiS4pLGeG secret/cross-site-sharded-replica-ssl created + cat /tmp/tmp.g3YN2pMd7R + rm /tmp/tmp.DJiS4pLGeG /tmp/tmp.g3YN2pMd7R + return 0 + sleep 30 + desc 'create replica PSMDB cluster ' + set +o xtrace ----------------------------------------------------------------------------------- create replica PSMDB cluster ----------------------------------------------------------------------------------- + apply_cluster /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/conf/cross-site-sharded-replica.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/conf/cross-site-sharded-replica.yml + kubectl_bin apply -f - ++ mktemp + yq eval '.spec.upgradeOptions.apply="Never"' + local LAST_OUT=/tmp/tmp.FvPsjXcATM ++ mktemp + local LAST_ERR=/tmp/tmp.cyfv6NHWE4 + local exit_status=0 + local timeout=4 + yq eval '(.spec | select(.image == null)).image = "perconalab/percona-server-mongodb-operator:main-mongod7.0"' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + yq eval '(.spec | select(has("initImage"))).initImage = "perconalab/percona-server-mongodb-operator:PR-1608-f10c3c44"' + yq eval '(.spec | select(has("backup"))).backup.image = "perconalab/percona-server-mongodb-operator:main-backup"' + cat /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/conf/cross-site-sharded-replica.yml + yq eval '(.spec | select(has("pmm"))).pmm.image = "perconalab/pmm-client:dev-latest"' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.FvPsjXcATM perconaservermongodb.psmdb.percona.com/cross-site-sharded-replica created + cat /tmp/tmp.cyfv6NHWE4 + rm /tmp/tmp.FvPsjXcATM /tmp/tmp.cyfv6NHWE4 + return 0 + wait_for_running cross-site-sharded-replica-rs0 3 false + local name=cross-site-sharded-replica-rs0 + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + 
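The secret handoff above boils down to a small, reusable pattern: export each Secret from the main cluster's namespace, drop its namespace-bound metadata and status, rename it for the replica cluster, and apply it again; the replica CR is then applied with its image fields overridden through yq. A minimal sketch of the secret copy, using the names from this run (yq v4 syntax as in the trace, and the current kubectl context assumed to point at the replica namespace when applying):

# Hedged sketch of the cross-namespace secret copy traced above; the secret and
# namespace names are the ones used in this run.
kubectl get secret cross-site-sharded-main-ssl -o yaml -n cross-site-sharded-30230 \
  | yq eval 'del(.metadata) | del(.status) | (.metadata.name = "cross-site-sharded-replica-ssl")' - \
  | kubectl apply -f -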
local cluster_name=cross-site-sharded-replica ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod cross-site-sharded-replica-rs0-0 + local pod=cross-site-sharded-replica-rs0-0 + set +o xtrace waiting for pod/cross-site-sharded-replica-rs0-0 to be ready............OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod cross-site-sharded-replica-rs0-1 + local pod=cross-site-sharded-replica-rs0-1 + set +o xtrace waiting for pod/cross-site-sharded-replica-rs0-1 to be ready...........OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Bcs63cvxEc +++ mktemp ++ local LAST_ERR=/tmp/tmp.Aso8583f7w ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Bcs63cvxEc ++ cat /tmp/tmp.Aso8583f7w ++ rm /tmp/tmp.Bcs63cvxEc /tmp/tmp.Aso8583f7w ++ return 0 + [[ false == \t\r\u\e ]] + wait_pod cross-site-sharded-replica-rs0-2 + local pod=cross-site-sharded-replica-rs0-2 + set +o xtrace waiting for pod/cross-site-sharded-replica-rs0-2 to be ready...........OK ++ kubectl_bin get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x7lzbmSQOu +++ mktemp ++ local LAST_ERR=/tmp/tmp.p5SYo5tBva ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.x7lzbmSQOu ++ cat /tmp/tmp.p5SYo5tBva ++ rm /tmp/tmp.x7lzbmSQOu /tmp/tmp.p5SYo5tBva ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ false == \t\r\u\e ]] + wait_for_running cross-site-sharded-replica-rs1 3 false + local name=cross-site-sharded-replica-rs1 + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs1 + local cluster_name=cross-site-sharded-replica ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod cross-site-sharded-replica-rs1-0 + local pod=cross-site-sharded-replica-rs1-0 + set +o xtrace waiting for pod/cross-site-sharded-replica-rs1-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod cross-site-sharded-replica-rs1-1 + local pod=cross-site-sharded-replica-rs1-1 + set +o xtrace waiting for pod/cross-site-sharded-replica-rs1-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs1")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h23VKnMg6G +++ mktemp ++ local LAST_ERR=/tmp/tmp.k902NKZY5K ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs1")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.h23VKnMg6G ++ cat /tmp/tmp.k902NKZY5K ++ rm /tmp/tmp.h23VKnMg6G /tmp/tmp.k902NKZY5K ++ return 0 + [[ false == \t\r\u\e ]] + wait_pod cross-site-sharded-replica-rs1-2 + local pod=cross-site-sharded-replica-rs1-2 + set +o xtrace waiting for pod/cross-site-sharded-replica-rs1-2 to 
be ready.OK ++ kubectl_bin get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs1")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NUOzTWGx26 +++ mktemp ++ local LAST_ERR=/tmp/tmp.BJstYajzjV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs1")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.NUOzTWGx26 ++ cat /tmp/tmp.BJstYajzjV ++ rm /tmp/tmp.NUOzTWGx26 /tmp/tmp.BJstYajzjV ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ false == \t\r\u\e ]] + wait_for_running cross-site-sharded-replica-cfg 3 false + local name=cross-site-sharded-replica-cfg + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=cross-site-sharded-replica ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod cross-site-sharded-replica-cfg-0 + local pod=cross-site-sharded-replica-cfg-0 + set +o xtrace waiting for pod/cross-site-sharded-replica-cfg-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod cross-site-sharded-replica-cfg-1 + local pod=cross-site-sharded-replica-cfg-1 + set +o xtrace waiting for pod/cross-site-sharded-replica-cfg-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VSx5ypf8yI +++ mktemp ++ local LAST_ERR=/tmp/tmp.Gj2C5QySJB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.VSx5ypf8yI ++ cat /tmp/tmp.Gj2C5QySJB ++ rm /tmp/tmp.VSx5ypf8yI /tmp/tmp.Gj2C5QySJB ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod cross-site-sharded-replica-cfg-2 + local pod=cross-site-sharded-replica-cfg-2 + set +o xtrace waiting for pod/cross-site-sharded-replica-cfg-2 to be ready.OK ++ kubectl_bin get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7ObH51WMbf +++ mktemp ++ local LAST_ERR=/tmp/tmp.N5tezCZgvj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.7ObH51WMbf ++ cat /tmp/tmp.N5tezCZgvj ++ rm /tmp/tmp.7ObH51WMbf /tmp/tmp.N5tezCZgvj ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ false == \t\r\u\e ]] ++ get_service_ip cross-site-sharded-replica-cfg-0 cfg ++ local service=cross-site-sharded-replica-cfg-0 ++ local server_type=cfg +++ kubectl_bin get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.koWVBaDXHh ++++ mktemp +++ local LAST_ERR=/tmp/tmp.x1ePjKZYHY +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.koWVBaDXHh +++ cat /tmp/tmp.x1ePjKZYHY +++ rm /tmp/tmp.koWVBaDXHh 
/tmp/tmp.x1ePjKZYHY +++ return 0 ++ '[' true '!=' true ']' ++ grep -q NotFound ++ kubectl_bin get service/cross-site-sharded-replica-cfg-0 -o 'jsonpath={.spec.type}' +++ kubectl_bin get service/cross-site-sharded-replica-cfg-0 -o 'jsonpath={.spec.type}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.M105Og6Pyx ++++ mktemp +++ local LAST_ERR=/tmp/tmp.CeGpbu9dMW +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get service/cross-site-sharded-replica-cfg-0 -o 'jsonpath={.spec.type}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.M105Og6Pyx +++ cat /tmp/tmp.CeGpbu9dMW +++ rm /tmp/tmp.M105Og6Pyx /tmp/tmp.CeGpbu9dMW +++ return 0 ++ service_type=ClusterIP ++ '[' ClusterIP = ClusterIP ']' ++ kubectl_bin get service/cross-site-sharded-replica-cfg-0 -o 'jsonpath={.spec.clusterIP}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.368X3uqm2d +++ mktemp ++ local LAST_ERR=/tmp/tmp.xArTmTp8Ub ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get service/cross-site-sharded-replica-cfg-0 -o 'jsonpath={.spec.clusterIP}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.368X3uqm2d ++ cat /tmp/tmp.xArTmTp8Ub ++ rm /tmp/tmp.368X3uqm2d /tmp/tmp.xArTmTp8Ub ++ return 0 ++ return + replica_cfg_0_endpoint=10.221.219.84 ++ get_service_ip cross-site-sharded-replica-cfg-1 cfg ++ local service=cross-site-sharded-replica-cfg-1 ++ local server_type=cfg +++ kubectl_bin get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.tNN8oaRwkq ++++ mktemp +++ local LAST_ERR=/tmp/tmp.T5iUBb0rj0 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.tNN8oaRwkq +++ cat /tmp/tmp.T5iUBb0rj0 +++ rm /tmp/tmp.tNN8oaRwkq /tmp/tmp.T5iUBb0rj0 +++ return 0 ++ '[' true '!=' true ']' ++ kubectl_bin get service/cross-site-sharded-replica-cfg-1 -o 'jsonpath={.spec.type}' ++ grep -q NotFound +++ kubectl_bin get service/cross-site-sharded-replica-cfg-1 -o 'jsonpath={.spec.type}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Xy62ZvXgr3 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.6jPlGiLx9K +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get service/cross-site-sharded-replica-cfg-1 -o 'jsonpath={.spec.type}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.Xy62ZvXgr3 +++ cat /tmp/tmp.6jPlGiLx9K +++ rm /tmp/tmp.Xy62ZvXgr3 /tmp/tmp.6jPlGiLx9K +++ return 0 ++ service_type=ClusterIP ++ '[' ClusterIP = ClusterIP ']' ++ kubectl_bin get service/cross-site-sharded-replica-cfg-1 -o 'jsonpath={.spec.clusterIP}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.09MQuvNoIn +++ mktemp ++ local LAST_ERR=/tmp/tmp.SANxyuwJL7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get service/cross-site-sharded-replica-cfg-1 -o 'jsonpath={.spec.clusterIP}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.09MQuvNoIn ++ cat /tmp/tmp.SANxyuwJL7 ++ rm /tmp/tmp.09MQuvNoIn /tmp/tmp.SANxyuwJL7 ++ return 0 ++ return + replica_cfg_1_endpoint=10.221.216.55 ++ get_service_ip cross-site-sharded-replica-cfg-2 cfg ++ local service=cross-site-sharded-replica-cfg-2 ++ local server_type=cfg +++ 
kubectl_bin get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.72C2vOsSsX ++++ mktemp +++ local LAST_ERR=/tmp/tmp.dPjhjLd6pK +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.72C2vOsSsX +++ cat /tmp/tmp.dPjhjLd6pK +++ rm /tmp/tmp.72C2vOsSsX /tmp/tmp.dPjhjLd6pK +++ return 0 ++ '[' true '!=' true ']' ++ kubectl_bin get service/cross-site-sharded-replica-cfg-2 -o 'jsonpath={.spec.type}' ++ grep -q NotFound +++ kubectl_bin get service/cross-site-sharded-replica-cfg-2 -o 'jsonpath={.spec.type}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.ZjyMpCRXPB ++++ mktemp +++ local LAST_ERR=/tmp/tmp.4OLyL7pwH5 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get service/cross-site-sharded-replica-cfg-2 -o 'jsonpath={.spec.type}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.ZjyMpCRXPB +++ cat /tmp/tmp.4OLyL7pwH5 +++ rm /tmp/tmp.ZjyMpCRXPB /tmp/tmp.4OLyL7pwH5 +++ return 0 ++ service_type=ClusterIP ++ '[' ClusterIP = ClusterIP ']' ++ kubectl_bin get service/cross-site-sharded-replica-cfg-2 -o 'jsonpath={.spec.clusterIP}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.onWne2mlAF +++ mktemp ++ local LAST_ERR=/tmp/tmp.JLTx7x1gHw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get service/cross-site-sharded-replica-cfg-2 -o 'jsonpath={.spec.clusterIP}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.onWne2mlAF ++ cat /tmp/tmp.JLTx7x1gHw ++ rm /tmp/tmp.onWne2mlAF /tmp/tmp.JLTx7x1gHw ++ return 0 ++ return + replica_cfg_2_endpoint=10.221.221.194 ++ get_service_ip cross-site-sharded-replica-rs0-0 ++ local service=cross-site-sharded-replica-rs0-0 ++ local server_type=rs0 +++ kubectl_bin get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.10AiuD8mHv ++++ mktemp +++ local LAST_ERR=/tmp/tmp.IBjijekIOd +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.10AiuD8mHv +++ cat /tmp/tmp.IBjijekIOd +++ rm /tmp/tmp.10AiuD8mHv /tmp/tmp.IBjijekIOd +++ return 0 ++ '[' true '!=' true ']' ++ kubectl_bin get service/cross-site-sharded-replica-rs0-0 -o 'jsonpath={.spec.type}' ++ grep -q NotFound +++ kubectl_bin get service/cross-site-sharded-replica-rs0-0 -o 'jsonpath={.spec.type}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.7vHPx9GSOX ++++ mktemp +++ local LAST_ERR=/tmp/tmp.pDgp5O7ah7 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get service/cross-site-sharded-replica-rs0-0 -o 'jsonpath={.spec.type}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.7vHPx9GSOX +++ cat /tmp/tmp.pDgp5O7ah7 +++ rm /tmp/tmp.7vHPx9GSOX /tmp/tmp.pDgp5O7ah7 +++ return 0 ++ service_type=ClusterIP ++ '[' ClusterIP = ClusterIP ']' ++ kubectl_bin get service/cross-site-sharded-replica-rs0-0 -o 'jsonpath={.spec.clusterIP}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.54ggEFTbQx +++ mktemp ++ local LAST_ERR=/tmp/tmp.e5PkCksm2i ++ 
local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get service/cross-site-sharded-replica-rs0-0 -o 'jsonpath={.spec.clusterIP}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.54ggEFTbQx ++ cat /tmp/tmp.e5PkCksm2i ++ rm /tmp/tmp.54ggEFTbQx /tmp/tmp.e5PkCksm2i ++ return 0 ++ return + replica_rs0_0_endpoint=10.221.216.48 ++ get_service_ip cross-site-sharded-replica-rs0-1 ++ local service=cross-site-sharded-replica-rs0-1 ++ local server_type=rs0 +++ kubectl_bin get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.ME6e6e8wPU ++++ mktemp +++ local LAST_ERR=/tmp/tmp.WRThADTntW +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.ME6e6e8wPU +++ cat /tmp/tmp.WRThADTntW +++ rm /tmp/tmp.ME6e6e8wPU /tmp/tmp.WRThADTntW +++ return 0 ++ '[' true '!=' true ']' ++ grep -q NotFound ++ kubectl_bin get service/cross-site-sharded-replica-rs0-1 -o 'jsonpath={.spec.type}' +++ kubectl_bin get service/cross-site-sharded-replica-rs0-1 -o 'jsonpath={.spec.type}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.N4zV66DaCW ++++ mktemp +++ local LAST_ERR=/tmp/tmp.rnNEskUUn2 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get service/cross-site-sharded-replica-rs0-1 -o 'jsonpath={.spec.type}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.N4zV66DaCW +++ cat /tmp/tmp.rnNEskUUn2 +++ rm /tmp/tmp.N4zV66DaCW /tmp/tmp.rnNEskUUn2 +++ return 0 ++ service_type=ClusterIP ++ '[' ClusterIP = ClusterIP ']' ++ kubectl_bin get service/cross-site-sharded-replica-rs0-1 -o 'jsonpath={.spec.clusterIP}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3z8238ia2Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZM0vuuqeUu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get service/cross-site-sharded-replica-rs0-1 -o 'jsonpath={.spec.clusterIP}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.3z8238ia2Y ++ cat /tmp/tmp.ZM0vuuqeUu ++ rm /tmp/tmp.3z8238ia2Y /tmp/tmp.ZM0vuuqeUu ++ return 0 ++ return + replica_rs0_1_endpoint=10.221.211.33 ++ get_service_ip cross-site-sharded-replica-rs0-2 ++ local service=cross-site-sharded-replica-rs0-2 ++ local server_type=rs0 +++ kubectl_bin get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.YnOiOoVgIg ++++ mktemp +++ local LAST_ERR=/tmp/tmp.2KPxFrUi2f +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.YnOiOoVgIg +++ cat /tmp/tmp.2KPxFrUi2f +++ rm /tmp/tmp.YnOiOoVgIg /tmp/tmp.2KPxFrUi2f +++ return 0 ++ '[' true '!=' true ']' ++ grep -q NotFound ++ kubectl_bin get service/cross-site-sharded-replica-rs0-2 -o 'jsonpath={.spec.type}' +++ kubectl_bin get service/cross-site-sharded-replica-rs0-2 -o 'jsonpath={.spec.type}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.qOR1tRlhLs ++++ mktemp +++ local LAST_ERR=/tmp/tmp.EPr4zE6LrN +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ 
set +e +++ kubectl get service/cross-site-sharded-replica-rs0-2 -o 'jsonpath={.spec.type}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.qOR1tRlhLs +++ cat /tmp/tmp.EPr4zE6LrN +++ rm /tmp/tmp.qOR1tRlhLs /tmp/tmp.EPr4zE6LrN +++ return 0 ++ service_type=ClusterIP ++ '[' ClusterIP = ClusterIP ']' ++ kubectl_bin get service/cross-site-sharded-replica-rs0-2 -o 'jsonpath={.spec.clusterIP}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sscQ2mV0wV +++ mktemp ++ local LAST_ERR=/tmp/tmp.SNIvpsZMLj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get service/cross-site-sharded-replica-rs0-2 -o 'jsonpath={.spec.clusterIP}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.sscQ2mV0wV ++ cat /tmp/tmp.SNIvpsZMLj ++ rm /tmp/tmp.sscQ2mV0wV /tmp/tmp.SNIvpsZMLj ++ return 0 ++ return + replica_rs0_2_endpoint=10.221.223.85 ++ get_service_ip cross-site-sharded-replica-rs1-0 rs1 ++ local service=cross-site-sharded-replica-rs1-0 ++ local server_type=rs1 +++ kubectl_bin get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Nvjbd1t7cU ++++ mktemp +++ local LAST_ERR=/tmp/tmp.ekS4JEKDns +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.Nvjbd1t7cU +++ cat /tmp/tmp.ekS4JEKDns +++ rm /tmp/tmp.Nvjbd1t7cU /tmp/tmp.ekS4JEKDns +++ return 0 ++ '[' true '!=' true ']' ++ kubectl_bin get service/cross-site-sharded-replica-rs1-0 -o 'jsonpath={.spec.type}' ++ grep -q NotFound +++ kubectl_bin get service/cross-site-sharded-replica-rs1-0 -o 'jsonpath={.spec.type}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.LTH9Clu52c ++++ mktemp +++ local LAST_ERR=/tmp/tmp.BBjxs1qLPx +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get service/cross-site-sharded-replica-rs1-0 -o 'jsonpath={.spec.type}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.LTH9Clu52c +++ cat /tmp/tmp.BBjxs1qLPx +++ rm /tmp/tmp.LTH9Clu52c /tmp/tmp.BBjxs1qLPx +++ return 0 ++ service_type=ClusterIP ++ '[' ClusterIP = ClusterIP ']' ++ kubectl_bin get service/cross-site-sharded-replica-rs1-0 -o 'jsonpath={.spec.clusterIP}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yGJKggH7xu +++ mktemp ++ local LAST_ERR=/tmp/tmp.3tYxKLxDkl ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get service/cross-site-sharded-replica-rs1-0 -o 'jsonpath={.spec.clusterIP}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.yGJKggH7xu ++ cat /tmp/tmp.3tYxKLxDkl ++ rm /tmp/tmp.yGJKggH7xu /tmp/tmp.3tYxKLxDkl ++ return 0 ++ return + replica_rs1_0_endpoint=10.221.223.86 ++ get_service_ip cross-site-sharded-replica-rs1-1 rs1 ++ local service=cross-site-sharded-replica-rs1-1 ++ local server_type=rs1 +++ kubectl_bin get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.2F7AoQqkpB ++++ mktemp +++ local LAST_ERR=/tmp/tmp.F0jFWavNmq +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' 
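Every replica_*_endpoint value above comes from the same lookup: with per-member exposure enabled, each pod gets its own Service, and for ClusterIP exposure the usable address is simply that Service's spec.clusterIP. A sketch of that lookup under the <cluster>-<replset>-<ordinal> Service naming seen in this run (LoadBalancer and NodePort handling omitted):

# Hedged sketch of the endpoint lookup feeding the externalNodes configuration;
# only the ClusterIP case exercised in this run is handled.
get_member_endpoint() {
  local svc=$1
  local svc_type
  svc_type=$(kubectl get "service/${svc}" -o 'jsonpath={.spec.type}')
  if [ "${svc_type}" = "ClusterIP" ]; then
    kubectl get "service/${svc}" -o 'jsonpath={.spec.clusterIP}'
  else
    echo "unsupported service type for this sketch: ${svc_type}" >&2
    return 1
  fi
}
get_member_endpoint cross-site-sharded-replica-cfg-0   # 10.221.219.84 in this run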
+++ break +++ cat /tmp/tmp.2F7AoQqkpB +++ cat /tmp/tmp.F0jFWavNmq +++ rm /tmp/tmp.2F7AoQqkpB /tmp/tmp.F0jFWavNmq +++ return 0 ++ '[' true '!=' true ']' ++ grep -q NotFound ++ kubectl_bin get service/cross-site-sharded-replica-rs1-1 -o 'jsonpath={.spec.type}' +++ kubectl_bin get service/cross-site-sharded-replica-rs1-1 -o 'jsonpath={.spec.type}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.zcHWuJMdAd ++++ mktemp +++ local LAST_ERR=/tmp/tmp.FLiEeQ4el4 +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get service/cross-site-sharded-replica-rs1-1 -o 'jsonpath={.spec.type}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.zcHWuJMdAd +++ cat /tmp/tmp.FLiEeQ4el4 +++ rm /tmp/tmp.zcHWuJMdAd /tmp/tmp.FLiEeQ4el4 +++ return 0 ++ service_type=ClusterIP ++ '[' ClusterIP = ClusterIP ']' ++ kubectl_bin get service/cross-site-sharded-replica-rs1-1 -o 'jsonpath={.spec.clusterIP}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vsvrxi8txF +++ mktemp ++ local LAST_ERR=/tmp/tmp.w7boVzHfFH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get service/cross-site-sharded-replica-rs1-1 -o 'jsonpath={.spec.clusterIP}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.vsvrxi8txF ++ cat /tmp/tmp.w7boVzHfFH ++ rm /tmp/tmp.vsvrxi8txF /tmp/tmp.w7boVzHfFH ++ return 0 ++ return + replica_rs1_1_endpoint=10.221.213.227 ++ get_service_ip cross-site-sharded-replica-rs1-2 rs1 ++ local service=cross-site-sharded-replica-rs1-2 ++ local server_type=rs1 +++ kubectl_bin get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.ll4Si35twB ++++ mktemp +++ local LAST_ERR=/tmp/tmp.etGyqmp4pS +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get psmdb/cross-site-sharded-replica -o 'jsonpath={.spec.replsets[].expose.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.ll4Si35twB +++ cat /tmp/tmp.etGyqmp4pS +++ rm /tmp/tmp.ll4Si35twB /tmp/tmp.etGyqmp4pS +++ return 0 ++ '[' true '!=' true ']' ++ grep -q NotFound ++ kubectl_bin get service/cross-site-sharded-replica-rs1-2 -o 'jsonpath={.spec.type}' +++ kubectl_bin get service/cross-site-sharded-replica-rs1-2 -o 'jsonpath={.spec.type}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.pB2PTuxKey ++++ mktemp +++ local LAST_ERR=/tmp/tmp.6fdc59jR5R +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get service/cross-site-sharded-replica-rs1-2 -o 'jsonpath={.spec.type}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.pB2PTuxKey +++ cat /tmp/tmp.6fdc59jR5R +++ rm /tmp/tmp.pB2PTuxKey /tmp/tmp.6fdc59jR5R +++ return 0 ++ service_type=ClusterIP ++ '[' ClusterIP = ClusterIP ']' ++ kubectl_bin get service/cross-site-sharded-replica-rs1-2 -o 'jsonpath={.spec.clusterIP}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gE8VdNkv5w +++ mktemp ++ local LAST_ERR=/tmp/tmp.DxuAl22DtU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get service/cross-site-sharded-replica-rs1-2 -o 'jsonpath={.spec.clusterIP}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.gE8VdNkv5w ++ cat /tmp/tmp.DxuAl22DtU ++ rm /tmp/tmp.gE8VdNkv5w /tmp/tmp.DxuAl22DtU ++ return 0 ++ return + replica_rs1_2_endpoint=10.221.219.33 ++ kubectl_bin config current-context +++ 
mktemp ++ local LAST_OUT=/tmp/tmp.1gyP9muPZT +++ mktemp ++ local LAST_ERR=/tmp/tmp.iHDUGIYmOe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.1gyP9muPZT ++ cat /tmp/tmp.iHDUGIYmOe ++ rm /tmp/tmp.1gyP9muPZT /tmp/tmp.iHDUGIYmOe ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7 --namespace=cross-site-sharded-30230 ++ mktemp + local LAST_OUT=/tmp/tmp.fy258rvj4f ++ mktemp + local LAST_ERR=/tmp/tmp.hf51cAu1pp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7 --namespace=cross-site-sharded-30230 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fy258rvj4f Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7" modified. + cat /tmp/tmp.hf51cAu1pp + rm /tmp/tmp.fy258rvj4f /tmp/tmp.hf51cAu1pp + return 0 + kubectl_bin patch psmdb cross-site-sharded-main --type=merge --patch '{ "spec": {"replsets":[ {"affinity":{"antiAffinityTopologyKey": "none"},"arbiter":{"affinity":{"antiAffinityTopologyKey": "none"},"enabled":false,"size":1},"expose":{"enabled":true,"type":"ClusterIp"},"externalNodes":[{"host":"10.221.216.48","priority":0,"votes":0},{"host":"10.221.211.33","port":27017,"priority":1,"votes":1},{"host":"10.221.223.85", "port":27017,"priority":1,"votes":1}],"name":"rs0","nonvoting":{"affinity":{"antiAffinityTopologyKey":"none"},"enabled":false,"podDisruptionBudget":{"maxUnavailable":1},"resources":{"limits":{"cpu":"300m","memory":"0.5G"},"requests":{"cpu":"300m","memory":"0.5G"}},"size":3,"volumeSpec":{"persistentVolumeClaim":{"resources":{"requests":{"storage":"1Gi"}}}}},"podDisruptionBudget":{"maxUnavailable":1},"resources":{"limits":{"cpu":"300m","memory":"0.5G"},"requests":{"cpu":"300m","memory":"0.5G"}},"size":3,"volumeSpec":{"persistentVolumeClaim":{"resources":{"requests":{"storage":"3Gi"}}}}}, {"affinity":{"antiAffinityTopologyKey": "none"},"arbiter":{"affinity":{"antiAffinityTopologyKey": "none"},"enabled":false,"size":1},"expose":{"enabled":true,"type":"ClusterIp"},"externalNodes":[{"host":"10.221.223.86","priority":0,"votes":0},{"host":"10.221.213.227","port":27017,"priority":1,"votes":1},{"host":"10.221.219.33", "port":27017,"priority":1,"votes":1}],"name":"rs1","nonvoting":{"affinity":{"antiAffinityTopologyKey":"none"},"enabled":false,"podDisruptionBudget":{"maxUnavailable":1},"resources":{"limits":{"cpu":"300m","memory":"0.5G"},"requests":{"cpu":"300m","memory":"0.5G"}},"size":3,"volumeSpec":{"persistentVolumeClaim":{"resources":{"requests":{"storage":"1Gi"}}}}},"podDisruptionBudget":{"maxUnavailable":1},"resources":{"limits":{"cpu":"300m","memory":"0.5G"},"requests":{"cpu":"300m","memory":"0.5G"}},"size":3,"volumeSpec":{"persistentVolumeClaim":{"resources":{"requests":{"storage":"3Gi"}}}}} ], "sharding":{"configsvrReplSet":{ "externalNodes": [{"host":"10.221.219.84","priority":1,"votes":1 },{"host":"10.221.216.55", "priority":1,"votes":1},{"host":"10.221.221.194","priority":0,"votes":0}]}} } }' ++ mktemp + local LAST_OUT=/tmp/tmp.SFa3zLbI69 ++ mktemp + local LAST_ERR=/tmp/tmp.S4QMiqIAAu + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch psmdb cross-site-sharded-main --type=merge --patch '{ "spec": {"replsets":[ 
{"affinity":{"antiAffinityTopologyKey": "none"},"arbiter":{"affinity":{"antiAffinityTopologyKey": "none"},"enabled":false,"size":1},"expose":{"enabled":true,"type":"ClusterIp"},"externalNodes":[{"host":"10.221.216.48","priority":0,"votes":0},{"host":"10.221.211.33","port":27017,"priority":1,"votes":1},{"host":"10.221.223.85", "port":27017,"priority":1,"votes":1}],"name":"rs0","nonvoting":{"affinity":{"antiAffinityTopologyKey":"none"},"enabled":false,"podDisruptionBudget":{"maxUnavailable":1},"resources":{"limits":{"cpu":"300m","memory":"0.5G"},"requests":{"cpu":"300m","memory":"0.5G"}},"size":3,"volumeSpec":{"persistentVolumeClaim":{"resources":{"requests":{"storage":"1Gi"}}}}},"podDisruptionBudget":{"maxUnavailable":1},"resources":{"limits":{"cpu":"300m","memory":"0.5G"},"requests":{"cpu":"300m","memory":"0.5G"}},"size":3,"volumeSpec":{"persistentVolumeClaim":{"resources":{"requests":{"storage":"3Gi"}}}}}, {"affinity":{"antiAffinityTopologyKey": "none"},"arbiter":{"affinity":{"antiAffinityTopologyKey": "none"},"enabled":false,"size":1},"expose":{"enabled":true,"type":"ClusterIp"},"externalNodes":[{"host":"10.221.223.86","priority":0,"votes":0},{"host":"10.221.213.227","port":27017,"priority":1,"votes":1},{"host":"10.221.219.33", "port":27017,"priority":1,"votes":1}],"name":"rs1","nonvoting":{"affinity":{"antiAffinityTopologyKey":"none"},"enabled":false,"podDisruptionBudget":{"maxUnavailable":1},"resources":{"limits":{"cpu":"300m","memory":"0.5G"},"requests":{"cpu":"300m","memory":"0.5G"}},"size":3,"volumeSpec":{"persistentVolumeClaim":{"resources":{"requests":{"storage":"1Gi"}}}}},"podDisruptionBudget":{"maxUnavailable":1},"resources":{"limits":{"cpu":"300m","memory":"0.5G"},"requests":{"cpu":"300m","memory":"0.5G"}},"size":3,"volumeSpec":{"persistentVolumeClaim":{"resources":{"requests":{"storage":"3Gi"}}}}} ], "sharding":{"configsvrReplSet":{ "externalNodes": [{"host":"10.221.219.84","priority":1,"votes":1 },{"host":"10.221.216.55", "priority":1,"votes":1},{"host":"10.221.221.194","priority":0,"votes":0}]}} } }' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.SFa3zLbI69 perconaservermongodb.psmdb.percona.com/cross-site-sharded-main patched + cat /tmp/tmp.S4QMiqIAAu + rm /tmp/tmp.SFa3zLbI69 /tmp/tmp.S4QMiqIAAu + return 0 + wait_for_members 10.221.219.84 cfg + local endpoint=10.221.219.84 + local rsName=cfg + local nodes_amount=0 + [[ 0 == 6 ]] ++ run_mongos 'rs.conf().members.length' clusterAdmin:clusterAdmin123456@10.221.219.84 mongodb :27017 ++ local 'command=rs.conf().members.length' ++ local uri=clusterAdmin:clusterAdmin123456@10.221.219.84 ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|bye' ++ local driver=mongodb ++ local suffix=:27017 ++ /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.nT5JrVhZfo ++++ mktemp +++ local LAST_ERR=/tmp/tmp.dSQuekbfdS +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.nT5JrVhZfo +++ cat /tmp/tmp.dSQuekbfdS +++ rm /tmp/tmp.nT5JrVhZfo /tmp/tmp.dSQuekbfdS +++ return 0 ++ local 
client_container=psmdb-client-6c585f8dbd-525mt ++ local mongo_flag= ++ kubectl_bin exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''rs.conf().members.length\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@10.221.219.84:27017/admin ' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KH6UyAQupK +++ mktemp ++ local LAST_ERR=/tmp/tmp.NTqwa2CZBb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''rs.conf().members.length\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@10.221.219.84:27017/admin ' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.KH6UyAQupK ++ cat /tmp/tmp.NTqwa2CZBb ++ rm /tmp/tmp.KH6UyAQupK /tmp/tmp.NTqwa2CZBb ++ return 0 + nodes_amount=4 + echo 'waiting for all members to be configured in cfg' waiting for all members to be configured in cfg + let retry+=1 + '[' 1 -ge 15 ']' + echo -n . .+ sleep 10 + [[ 4 == 6 ]] ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|bye' ++ /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ run_mongos 'rs.conf().members.length' clusterAdmin:clusterAdmin123456@10.221.219.84 mongodb :27017 ++ local 'command=rs.conf().members.length' ++ local uri=clusterAdmin:clusterAdmin123456@10.221.219.84 ++ local driver=mongodb ++ local suffix=:27017 +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.v7igHSkb20 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.4JkMWLXLQI +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.v7igHSkb20 +++ cat /tmp/tmp.4JkMWLXLQI +++ rm /tmp/tmp.v7igHSkb20 /tmp/tmp.4JkMWLXLQI +++ return 0 ++ local client_container=psmdb-client-6c585f8dbd-525mt ++ local mongo_flag= ++ kubectl_bin exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''rs.conf().members.length\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@10.221.219.84:27017/admin ' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CWWwsPqBWV +++ mktemp ++ local LAST_ERR=/tmp/tmp.m5FlU4f4JY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''rs.conf().members.length\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@10.221.219.84:27017/admin ' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.CWWwsPqBWV ++ cat /tmp/tmp.m5FlU4f4JY ++ rm /tmp/tmp.CWWwsPqBWV /tmp/tmp.m5FlU4f4JY ++ return 0 + nodes_amount=6 + echo 'waiting for all members to be configured in cfg' waiting for all members to be configured in cfg + let retry+=1 + '[' 2 -ge 15 ']' + echo -n . 
.+ sleep 10 + [[ 6 == 6 ]] + wait_for_members 10.221.216.48 rs0 + local endpoint=10.221.216.48 + local rsName=rs0 + local nodes_amount=0 + [[ 0 == 6 ]] ++ run_mongos 'rs.conf().members.length' clusterAdmin:clusterAdmin123456@10.221.216.48 mongodb :27017 ++ local 'command=rs.conf().members.length' ++ local uri=clusterAdmin:clusterAdmin123456@10.221.216.48 ++ local driver=mongodb ++ local suffix=:27017 ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|bye' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.QFh3qAObui ++++ mktemp ++ /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ local LAST_ERR=/tmp/tmp.ITMTtAxxXL +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.QFh3qAObui +++ cat /tmp/tmp.ITMTtAxxXL +++ rm /tmp/tmp.QFh3qAObui /tmp/tmp.ITMTtAxxXL +++ return 0 ++ local client_container=psmdb-client-6c585f8dbd-525mt ++ local mongo_flag= ++ kubectl_bin exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''rs.conf().members.length\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@10.221.216.48:27017/admin ' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xqmDDyrf0f +++ mktemp ++ local LAST_ERR=/tmp/tmp.wkW6oWZrDP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''rs.conf().members.length\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@10.221.216.48:27017/admin ' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.xqmDDyrf0f ++ cat /tmp/tmp.wkW6oWZrDP ++ rm /tmp/tmp.xqmDDyrf0f /tmp/tmp.wkW6oWZrDP ++ return 0 + nodes_amount=6 + echo 'waiting for all members to be configured in rs0' waiting for all members to be configured in rs0 + let retry+=1 + '[' 3 -ge 15 ']' + echo -n . 
.+ sleep 10 + [[ 6 == 6 ]] + wait_for_members 10.221.223.86 rs1 + local endpoint=10.221.223.86 + local rsName=rs1 + local nodes_amount=0 + [[ 0 == 6 ]] ++ run_mongos 'rs.conf().members.length' clusterAdmin:clusterAdmin123456@10.221.223.86 mongodb :27017 ++ local 'command=rs.conf().members.length' ++ local uri=clusterAdmin:clusterAdmin123456@10.221.223.86 ++ local driver=mongodb ++ local suffix=:27017 ++ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:|bye' ++ /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.3lVmrkLXr1 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.kFWb5iSIQs +++ local exit_status=0 +++ local timeout=4 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 -a -n 1 ']' +++ break +++ cat /tmp/tmp.3lVmrkLXr1 +++ cat /tmp/tmp.kFWb5iSIQs +++ rm /tmp/tmp.3lVmrkLXr1 /tmp/tmp.kFWb5iSIQs +++ return 0 ++ local client_container=psmdb-client-6c585f8dbd-525mt ++ local mongo_flag= ++ kubectl_bin exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''rs.conf().members.length\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@10.221.223.86:27017/admin ' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Sf11tS82Np +++ mktemp ++ local LAST_ERR=/tmp/tmp.Rc0zsHPLhb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec psmdb-client-6c585f8dbd-525mt -- bash -c 'printf '\''rs.conf().members.length\n'\'' | mongo mongodb://clusterAdmin:clusterAdmin123456@10.221.223.86:27017/admin ' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Sf11tS82Np ++ cat /tmp/tmp.Rc0zsHPLhb ++ rm /tmp/tmp.Sf11tS82Np /tmp/tmp.Rc0zsHPLhb ++ return 0 + nodes_amount=6 + echo 'waiting for all members to be configured in rs1' waiting for all members to be configured in rs1 + let retry+=1 + '[' 4 -ge 15 ']' + echo -n . .+ sleep 10 + [[ 6 == 6 ]] ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.pCEMOopraE +++ mktemp ++ local LAST_ERR=/tmp/tmp.5u4NBHkeEk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pCEMOopraE ++ cat /tmp/tmp.5u4NBHkeEk ++ rm /tmp/tmp.pCEMOopraE /tmp/tmp.5u4NBHkeEk ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7 --namespace=cross-site-sharded-replica-9589 ++ mktemp + local LAST_OUT=/tmp/tmp.3Fu1aGLxFp ++ mktemp + local LAST_ERR=/tmp/tmp.kTVPZJikul + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7 --namespace=cross-site-sharded-replica-9589 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3Fu1aGLxFp Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7" modified. 
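After the main cluster is patched with the replica-side endpoints as externalNodes, the test polls each replica set (cfg, rs0, rs1) until rs.conf() reports all six members: the three pods managed in the main namespace plus the three external ones added above. A sketch of that wait loop, assuming the same psmdb-client pod and clusterAdmin credentials shown in the trace, with the log's limits of 15 attempts spaced 10 seconds apart:

# Hedged sketch of the membership wait: ask the replica set for its configured
# member count through the client pod until the expected value is reported.
wait_members() {
  local endpoint=$1 expected=$2 retry=0 members
  local client
  client=$(kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}')
  while true; do
    # tail -n1 drops any shell warnings (for example the shell/server version
    # mismatch printed in this log) so only the member count is compared
    members=$(kubectl exec "${client}" -- mongo --quiet \
      "mongodb://clusterAdmin:clusterAdmin123456@${endpoint}:27017/admin" \
      --eval 'print(rs.conf().members.length)' | tail -n 1)
    [ "${members}" = "${expected}" ] && return 0
    retry=$((retry + 1))
    if [ "${retry}" -ge 15 ]; then
      echo "replica set at ${endpoint} never reached ${expected} members" >&2
      return 1
    fi
    sleep 10
  done
}
wait_members 10.221.219.84 6   # cfg replica set in this run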
+ cat /tmp/tmp.kTVPZJikul + rm /tmp/tmp.3Fu1aGLxFp /tmp/tmp.kTVPZJikul + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- + wait_for_running cross-site-sharded-replica-rs0 3 + local name=cross-site-sharded-replica-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=cross-site-sharded-replica ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod cross-site-sharded-replica-rs0-0 + local pod=cross-site-sharded-replica-rs0-0 + set +o xtrace waiting for pod/cross-site-sharded-replica-rs0-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod cross-site-sharded-replica-rs0-1 + local pod=cross-site-sharded-replica-rs0-1 + set +o xtrace waiting for pod/cross-site-sharded-replica-rs0-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AsKbO8GiP6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.L8V3sjQQSy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.AsKbO8GiP6 ++ cat /tmp/tmp.L8V3sjQQSy ++ rm /tmp/tmp.AsKbO8GiP6 /tmp/tmp.L8V3sjQQSy ++ return 0 + [[ false == \t\r\u\e ]] + wait_pod cross-site-sharded-replica-rs0-2 + local pod=cross-site-sharded-replica-rs0-2 + set +o xtrace waiting for pod/cross-site-sharded-replica-rs0-2 to be ready.OK ++ kubectl_bin get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5AeTgaQ0pX +++ mktemp ++ local LAST_ERR=/tmp/tmp.sJPfvbRmTx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.5AeTgaQ0pX ++ cat /tmp/tmp.sJPfvbRmTx ++ rm /tmp/tmp.5AeTgaQ0pX /tmp/tmp.sJPfvbRmTx ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + wait_for_running cross-site-sharded-replica-cfg 3 false + local name=cross-site-sharded-replica-cfg + let last_pod=2 + local check_cluster_readyness=false + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=cross-site-sharded-replica ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod cross-site-sharded-replica-cfg-0 + local pod=cross-site-sharded-replica-cfg-0 + set +o xtrace waiting for pod/cross-site-sharded-replica-cfg-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod cross-site-sharded-replica-cfg-1 + local pod=cross-site-sharded-replica-cfg-1 + set +o xtrace waiting for pod/cross-site-sharded-replica-cfg-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mjSjp2mokx +++ mktemp ++ local LAST_ERR=/tmp/tmp.2WuHKpe27V ++ local 
exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.mjSjp2mokx ++ cat /tmp/tmp.2WuHKpe27V ++ rm /tmp/tmp.mjSjp2mokx /tmp/tmp.2WuHKpe27V ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod cross-site-sharded-replica-cfg-2 + local pod=cross-site-sharded-replica-cfg-2 + set +o xtrace waiting for pod/cross-site-sharded-replica-cfg-2 to be ready.OK ++ kubectl_bin get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Fkmv4OWGk0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.hT2M8qrc0J ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Fkmv4OWGk0 ++ cat /tmp/tmp.hT2M8qrc0J ++ rm /tmp/tmp.Fkmv4OWGk0 /tmp/tmp.hT2M8qrc0J ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ false == \t\r\u\e ]] + desc 'create user' + set +o xtrace ----------------------------------------------------------------------------------- create user ----------------------------------------------------------------------------------- + run_mongos 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' userAdmin:userAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local 'command=db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})' + local uri=userAdmin:userAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DlcEsEPswY +++ mktemp ++ local LAST_ERR=/tmp/tmp.ejm47Ith5x ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.DlcEsEPswY ++ cat /tmp/tmp.ejm47Ith5x ++ rm /tmp/tmp.DlcEsEPswY /tmp/tmp.ejm47Ith5x ++ return 0 + local client_container=psmdb-client-6c585f8dbd-xnl6c + local mongo_flag= + kubectl_bin exec psmdb-client-6c585f8dbd-xnl6c -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.wGCHQ5FtAf ++ mktemp + local LAST_ERR=/tmp/tmp.uPJKGL7rqL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-xnl6c -- bash -c 'printf '\''db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})\n'\'' | mongo mongodb://userAdmin:userAdmin123456@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wGCHQ5FtAf Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { 
"id" : UUID("dd7179e3-2a77-4803-95b2-f866018d7c3b") } Percona Server for MongoDB server version: v7.0.14-8 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" } ] } bye + cat /tmp/tmp.uPJKGL7rqL + rm /tmp/tmp.wGCHQ5FtAf /tmp/tmp.uPJKGL7rqL + return 0 + sleep 2 + desc 'write data, read from all' + set +o xtrace ----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- + run_mongos 'use myApp\n db.test.insert({ x: 100500 })' myApp:myPass@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local 'command=use myApp\n db.test.insert({ x: 100500 })' + local uri=myApp:myPass@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FQB7kBAU1O +++ mktemp ++ local LAST_ERR=/tmp/tmp.WNHvnGAMs0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.FQB7kBAU1O ++ cat /tmp/tmp.WNHvnGAMs0 ++ rm /tmp/tmp.FQB7kBAU1O /tmp/tmp.WNHvnGAMs0 ++ return 0 + local client_container=psmdb-client-6c585f8dbd-xnl6c + local mongo_flag= + kubectl_bin exec psmdb-client-6c585f8dbd-xnl6c -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb://myApp:myPass@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.cyvvmFxF6L ++ mktemp + local LAST_ERR=/tmp/tmp.8CnvVmf164 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-xnl6c -- bash -c 'printf '\''use myApp\n db.test.insert({ x: 100500 })\n'\'' | mongo mongodb://myApp:myPass@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.cyvvmFxF6L Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("9cd003d9-605c-40a6-ae11-7167ef22d638") } Percona Server for MongoDB server version: v7.0.14-8 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye + cat /tmp/tmp.8CnvVmf164 + rm /tmp/tmp.cyvvmFxF6L /tmp/tmp.8CnvVmf164 + return 0 + minikube_sleep + sleep_time=10 + [[ '' == 1 ]] + desc 'Compare data' + set +o xtrace ----------------------------------------------------------------------------------- Compare data ----------------------------------------------------------------------------------- + compare_mongos_cmd find myApp:myPass@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local command=find + local uri=myApp:myPass@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongos 'use myApp\n db.test.find()' myApp:myPass@cross-site-sharded-main-mongos.cross-site-sharded-30230 mongodb '' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for 
MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cross-site-sharded-main-mongos.cross-site-sharded-30230 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BJvHKd2DUK +++ mktemp ++ local LAST_ERR=/tmp/tmp.dqd5Xe4R0m ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.BJvHKd2DUK ++ cat /tmp/tmp.dqd5Xe4R0m ++ rm /tmp/tmp.BJvHKd2DUK /tmp/tmp.dqd5Xe4R0m ++ return 0 + local client_container=psmdb-client-6c585f8dbd-xnl6c + local mongo_flag= + kubectl_bin exec psmdb-client-6c585f8dbd-xnl6c -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.eM6qcDkDQ3 ++ mktemp + local LAST_ERR=/tmp/tmp.9oMyNabGGd + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-xnl6c -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.eM6qcDkDQ3 + cat /tmp/tmp.9oMyNabGGd + rm /tmp/tmp.eM6qcDkDQ3 /tmp/tmp.9oMyNabGGd + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/compare/find.json /tmp/tmp.rd0HZlsLU2/find + desc 'test failover' + set +o xtrace ----------------------------------------------------------------------------------- test failover ----------------------------------------------------------------------------------- ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.PCSqKPiBTN +++ mktemp ++ local LAST_ERR=/tmp/tmp.uQJevEBTCJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.PCSqKPiBTN ++ cat /tmp/tmp.uQJevEBTCJ ++ rm /tmp/tmp.PCSqKPiBTN /tmp/tmp.uQJevEBTCJ ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7 --namespace=cross-site-sharded-30230 ++ mktemp + local LAST_OUT=/tmp/tmp.UOAvMKv4CZ ++ mktemp + local LAST_ERR=/tmp/tmp.mryv9Rb3dH + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7 --namespace=cross-site-sharded-30230 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UOAvMKv4CZ Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7" modified. 
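Before the failover is exercised, the data path is verified with a deliberately small smoke test: an application user is created through the main cluster's mongos, one document is inserted, and db.test.find() (with ObjectIds and namespace suffixes normalized by the egrep/sed filters above) is diffed against the stored find.json reference. A sketch of the same write-and-read check, reusing the mongos address and credentials from this run and assuming the current namespace still holds the psmdb-client pod:

# Hedged sketch of the data smoke test routed through mongos; the users,
# passwords and mongos service name are the ones used in this run.
client=$(kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}')
mongos=cross-site-sharded-main-mongos.cross-site-sharded-30230.svc.cluster.local
kubectl exec "${client}" -- mongo --quiet \
  "mongodb://userAdmin:userAdmin123456@${mongos}/admin" \
  --eval 'db.createUser({user:"myApp",pwd:"myPass",roles:[{db:"myApp",role:"readWrite"}]})'
kubectl exec "${client}" -- mongo --quiet \
  "mongodb://myApp:myPass@${mongos}/admin" \
  --eval 'var app = db.getSiblingDB("myApp"); app.test.insert({ x: 100500 }); app.test.find().forEach(printjson)'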
+ cat /tmp/tmp.mryv9Rb3dH + rm /tmp/tmp.UOAvMKv4CZ /tmp/tmp.mryv9Rb3dH + return 0 + kubectl_bin delete psmdb cross-site-sharded-main ++ mktemp + local LAST_OUT=/tmp/tmp.S9N5PQ2TrV ++ mktemp + local LAST_ERR=/tmp/tmp.HjCK37c9Sj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete psmdb cross-site-sharded-main + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.S9N5PQ2TrV perconaservermongodb.psmdb.percona.com "cross-site-sharded-main" deleted + cat /tmp/tmp.HjCK37c9Sj + rm /tmp/tmp.S9N5PQ2TrV /tmp/tmp.HjCK37c9Sj + return 0 + desc 'run disaster recovery script for replset: cfg' + set +o xtrace ----------------------------------------------------------------------------------- run disaster recovery script for replset: cfg ----------------------------------------------------------------------------------- + run_script_mongos /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/disaster_recovery.js clusterAdmin:clusterAdmin123456@10.221.219.84 mongodb :27017 + local script=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/disaster_recovery.js + local uri=clusterAdmin:clusterAdmin123456@10.221.219.84 + local driver=mongodb + local suffix=:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WY4gXbv4QC +++ mktemp ++ local LAST_ERR=/tmp/tmp.63C0PBeVyB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.WY4gXbv4QC ++ cat /tmp/tmp.63C0PBeVyB ++ rm /tmp/tmp.WY4gXbv4QC /tmp/tmp.63C0PBeVyB ++ return 0 + local client_container=psmdb-client-6c585f8dbd-525mt + local mongo_flag= ++ basename /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/disaster_recovery.js + name=disaster_recovery.js + kubectl_bin cp /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/disaster_recovery.js cross-site-sharded-30230/psmdb-client-6c585f8dbd-525mt:/tmp ++ mktemp + local LAST_OUT=/tmp/tmp.sVPRZzInWi ++ mktemp + local LAST_ERR=/tmp/tmp.CiWaVLZ5EJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl cp /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/disaster_recovery.js cross-site-sharded-30230/psmdb-client-6c585f8dbd-525mt:/tmp + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.sVPRZzInWi + cat /tmp/tmp.CiWaVLZ5EJ + rm /tmp/tmp.sVPRZzInWi /tmp/tmp.CiWaVLZ5EJ + return 0 + kubectl_bin exec psmdb-client-6c585f8dbd-525mt -- bash -c 'mongo mongodb://clusterAdmin:clusterAdmin123456@10.221.219.84:27017/admin /tmp/disaster_recovery.js' ++ mktemp + local LAST_OUT=/tmp/tmp.Oq5FgLnIeb ++ mktemp + local LAST_ERR=/tmp/tmp.zYVaQdISVd + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-525mt -- bash -c 'mongo mongodb://clusterAdmin:clusterAdmin123456@10.221.219.84:27017/admin /tmp/disaster_recovery.js' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.Oq5FgLnIeb Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://10.221.219.84:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : 
UUID("3b56afb4-22fd-4b02-848b-07913eb69dd3") } Percona Server for MongoDB server version: v7.0.14-8 WARNING: shell and server versions do not match + cat /tmp/tmp.zYVaQdISVd + rm /tmp/tmp.Oq5FgLnIeb /tmp/tmp.zYVaQdISVd + return 0 + desc 'run disaster recovery script for replset: rs0' + set +o xtrace ----------------------------------------------------------------------------------- run disaster recovery script for replset: rs0 ----------------------------------------------------------------------------------- + run_script_mongos /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/disaster_recovery.js clusterAdmin:clusterAdmin123456@10.221.216.48 mongodb :27017 + local script=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/disaster_recovery.js + local uri=clusterAdmin:clusterAdmin123456@10.221.216.48 + local driver=mongodb + local suffix=:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kTK15gOi0F +++ mktemp ++ local LAST_ERR=/tmp/tmp.agtq3qLWyk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.kTK15gOi0F ++ cat /tmp/tmp.agtq3qLWyk ++ rm /tmp/tmp.kTK15gOi0F /tmp/tmp.agtq3qLWyk ++ return 0 + local client_container=psmdb-client-6c585f8dbd-525mt + local mongo_flag= ++ basename /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/disaster_recovery.js + name=disaster_recovery.js + kubectl_bin cp /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/disaster_recovery.js cross-site-sharded-30230/psmdb-client-6c585f8dbd-525mt:/tmp ++ mktemp + local LAST_OUT=/tmp/tmp.fawDHwSb8o ++ mktemp + local LAST_ERR=/tmp/tmp.6aQNrxkoxM + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl cp /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/disaster_recovery.js cross-site-sharded-30230/psmdb-client-6c585f8dbd-525mt:/tmp + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.fawDHwSb8o + cat /tmp/tmp.6aQNrxkoxM + rm /tmp/tmp.fawDHwSb8o /tmp/tmp.6aQNrxkoxM + return 0 + kubectl_bin exec psmdb-client-6c585f8dbd-525mt -- bash -c 'mongo mongodb://clusterAdmin:clusterAdmin123456@10.221.216.48:27017/admin /tmp/disaster_recovery.js' ++ mktemp + local LAST_OUT=/tmp/tmp.inYJzSuhhH ++ mktemp + local LAST_ERR=/tmp/tmp.lknX6VSKSl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-525mt -- bash -c 'mongo mongodb://clusterAdmin:clusterAdmin123456@10.221.216.48:27017/admin /tmp/disaster_recovery.js' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.inYJzSuhhH Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://10.221.216.48:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("9a9dd3c2-f50b-40f9-bb95-3bbce9f422bf") } Percona Server for MongoDB server version: v7.0.14-8 WARNING: shell and server versions do not match + cat /tmp/tmp.lknX6VSKSl + rm /tmp/tmp.inYJzSuhhH /tmp/tmp.lknX6VSKSl + return 0 + desc 'run disaster recovery script for replset: rs1' + set +o xtrace ----------------------------------------------------------------------------------- run disaster recovery 
script for replset: rs1 ----------------------------------------------------------------------------------- + run_script_mongos /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/disaster_recovery.js clusterAdmin:clusterAdmin123456@10.221.223.86 mongodb :27017 + local script=/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/disaster_recovery.js + local uri=clusterAdmin:clusterAdmin123456@10.221.223.86 + local driver=mongodb + local suffix=:27017 ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h6JXst7l9n +++ mktemp ++ local LAST_ERR=/tmp/tmp.ELLVvz0oKk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.h6JXst7l9n ++ cat /tmp/tmp.ELLVvz0oKk ++ rm /tmp/tmp.h6JXst7l9n /tmp/tmp.ELLVvz0oKk ++ return 0 + local client_container=psmdb-client-6c585f8dbd-525mt + local mongo_flag= ++ basename /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/disaster_recovery.js + name=disaster_recovery.js + kubectl_bin cp /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/disaster_recovery.js cross-site-sharded-30230/psmdb-client-6c585f8dbd-525mt:/tmp ++ mktemp + local LAST_OUT=/tmp/tmp.jImJsQPHVN ++ mktemp + local LAST_ERR=/tmp/tmp.l50OpznvF7 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl cp /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/disaster_recovery.js cross-site-sharded-30230/psmdb-client-6c585f8dbd-525mt:/tmp + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.jImJsQPHVN + cat /tmp/tmp.l50OpznvF7 + rm /tmp/tmp.jImJsQPHVN /tmp/tmp.l50OpznvF7 + return 0 + kubectl_bin exec psmdb-client-6c585f8dbd-525mt -- bash -c 'mongo mongodb://clusterAdmin:clusterAdmin123456@10.221.223.86:27017/admin /tmp/disaster_recovery.js' ++ mktemp + local LAST_OUT=/tmp/tmp.2cMdaKiHQu ++ mktemp + local LAST_ERR=/tmp/tmp.p7onGAPi93 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-525mt -- bash -c 'mongo mongodb://clusterAdmin:clusterAdmin123456@10.221.223.86:27017/admin /tmp/disaster_recovery.js' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.2cMdaKiHQu Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://10.221.223.86:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("8f5e6f13-a995-4807-ac34-f633db331acf") } Percona Server for MongoDB server version: v7.0.14-8 WARNING: shell and server versions do not match + cat /tmp/tmp.p7onGAPi93 + rm /tmp/tmp.2cMdaKiHQu /tmp/tmp.p7onGAPi93 + return 0 + desc 'make replica cluster managed' + set +o xtrace ----------------------------------------------------------------------------------- make replica cluster managed ----------------------------------------------------------------------------------- ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.UHHFEFAG68 +++ mktemp ++ local LAST_ERR=/tmp/tmp.91qgkFncTf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat 
/tmp/tmp.UHHFEFAG68 ++ cat /tmp/tmp.91qgkFncTf ++ rm /tmp/tmp.UHHFEFAG68 /tmp/tmp.91qgkFncTf ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7 --namespace=cross-site-sharded-replica-9589 ++ mktemp + local LAST_OUT=/tmp/tmp.3usaoEj2EW ++ mktemp + local LAST_ERR=/tmp/tmp.MW3XIhG6gd + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7 --namespace=cross-site-sharded-replica-9589 + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.3usaoEj2EW Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1608-f10c3c44-3-cluster7" modified. + cat /tmp/tmp.MW3XIhG6gd + rm /tmp/tmp.3usaoEj2EW /tmp/tmp.MW3XIhG6gd + return 0 + kubectl_bin patch psmdb cross-site-sharded-replica --type=merge --patch '{"spec":{"unmanaged": false}}' ++ mktemp + local LAST_OUT=/tmp/tmp.yDVJJ7CqK7 ++ mktemp + local LAST_ERR=/tmp/tmp.ACFOrfdD68 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch psmdb cross-site-sharded-replica --type=merge --patch '{"spec":{"unmanaged": false}}' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.yDVJJ7CqK7 perconaservermongodb.psmdb.percona.com/cross-site-sharded-replica patched + cat /tmp/tmp.ACFOrfdD68 + rm /tmp/tmp.yDVJJ7CqK7 /tmp/tmp.ACFOrfdD68 + return 0 + wait_for_running cross-site-sharded-replica-rs0 3 + local name=cross-site-sharded-replica-rs0 + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=rs0 + local cluster_name=cross-site-sharded-replica ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod cross-site-sharded-replica-rs0-0 + local pod=cross-site-sharded-replica-rs0-0 + set +o xtrace waiting for pod/cross-site-sharded-replica-rs0-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod cross-site-sharded-replica-rs0-1 + local pod=cross-site-sharded-replica-rs0-1 + set +o xtrace waiting for pod/cross-site-sharded-replica-rs0-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2mmoNVs3Ry +++ mktemp ++ local LAST_ERR=/tmp/tmp.BchdwSof4h ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.2mmoNVs3Ry ++ cat /tmp/tmp.BchdwSof4h ++ rm /tmp/tmp.2mmoNVs3Ry /tmp/tmp.BchdwSof4h ++ return 0 + [[ false == \t\r\u\e ]] + wait_pod cross-site-sharded-replica-rs0-2 + local pod=cross-site-sharded-replica-rs0-2 + set +o xtrace waiting for pod/cross-site-sharded-replica-rs0-2 to be ready.OK ++ kubectl_bin get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pV4M3WENmJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.HTVzwVzFV7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="rs0")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.pV4M3WENmJ ++ cat /tmp/tmp.HTVzwVzFV7 ++ rm 
/tmp/tmp.pV4M3WENmJ /tmp/tmp.HTVzwVzFV7 ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + wait_for_running cross-site-sharded-replica-cfg 3 + local name=cross-site-sharded-replica-cfg + let last_pod=2 + local check_cluster_readyness=true + set_debug + [[ 1 == 1 ]] + set -o xtrace + local rs_name=cfg + local cluster_name=cross-site-sharded-replica ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + [[ 0 -eq 2 ]] + wait_pod cross-site-sharded-replica-cfg-0 + local pod=cross-site-sharded-replica-cfg-0 + set +o xtrace waiting for pod/cross-site-sharded-replica-cfg-0 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 1 -eq 2 ]] + wait_pod cross-site-sharded-replica-cfg-1 + local pod=cross-site-sharded-replica-cfg-1 + set +o xtrace waiting for pod/cross-site-sharded-replica-cfg-1 to be ready.OK + for i in '$(seq 0 $last_pod)' + [[ 2 -eq 2 ]] ++ kubectl_bin get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iK8PJMbMAH +++ mktemp ++ local LAST_ERR=/tmp/tmp.VE8m0Zy6U8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].arbiter.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.iK8PJMbMAH ++ cat /tmp/tmp.VE8m0Zy6U8 ++ rm /tmp/tmp.iK8PJMbMAH /tmp/tmp.VE8m0Zy6U8 ++ return 0 + [[ '' == \t\r\u\e ]] + wait_pod cross-site-sharded-replica-cfg-2 + local pod=cross-site-sharded-replica-cfg-2 + set +o xtrace waiting for pod/cross-site-sharded-replica-cfg-2 to be ready.OK ++ kubectl_bin get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Npp6CXlFoK +++ mktemp ++ local LAST_ERR=/tmp/tmp.2zbu8VtBXO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-replica -o 'jsonpath={.spec.replsets[?(@.name=="cfg")].non_voting.enabled}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.Npp6CXlFoK ++ cat /tmp/tmp.2zbu8VtBXO ++ rm /tmp/tmp.Npp6CXlFoK /tmp/tmp.2zbu8VtBXO ++ return 0 + [[ '' == \t\r\u\e ]] + sleep 10 + [[ true == \t\r\u\e ]] + set +x Waiting for cluster readyness + desc 'check failover status' + set +o xtrace ----------------------------------------------------------------------------------- check failover status ----------------------------------------------------------------------------------- + compare_mongos_cmd find myApp:myPass@cross-site-sharded-replica-mongos.cross-site-sharded-replica-9589 + local command=find + local uri=myApp:myPass@cross-site-sharded-replica-mongos.cross-site-sharded-replica-9589 + local postfix= + local suffix= + local database=myApp + local collection=test + run_mongos 'use myApp\n db.test.find()' myApp:myPass@cross-site-sharded-replica-mongos.cross-site-sharded-replica-9589 mongodb '' + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@cross-site-sharded-replica-mongos.cross-site-sharded-replica-9589 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + 
/usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.EDJDHFfO18 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tk9APbvzB1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.EDJDHFfO18 ++ cat /tmp/tmp.tk9APbvzB1 ++ rm /tmp/tmp.EDJDHFfO18 /tmp/tmp.tk9APbvzB1 ++ return 0 + local client_container=psmdb-client-6c585f8dbd-xnl6c + local mongo_flag= + kubectl_bin exec psmdb-client-6c585f8dbd-xnl6c -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cross-site-sharded-replica-mongos.cross-site-sharded-replica-9589.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.OREwpdrQ6f ++ mktemp + local LAST_ERR=/tmp/tmp.VCVRFk3nWm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-6c585f8dbd-xnl6c -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@cross-site-sharded-replica-mongos.cross-site-sharded-replica-9589.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.OREwpdrQ6f + cat /tmp/tmp.VCVRFk3nWm + rm /tmp/tmp.OREwpdrQ6f /tmp/tmp.VCVRFk3nWm + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/e2e-tests/cross-site-sharded/compare/find.json /tmp/tmp.rd0HZlsLU2/find + desc 'Failover check finished successfully' + set +o xtrace ----------------------------------------------------------------------------------- Failover check finished successfully ----------------------------------------------------------------------------------- + wait_cluster_consistency cross-site-sharded-replica + local cluster_name=cross-site-sharded-replica + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb cross-site-sharded-replica -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J2reGktA2B +++ mktemp ++ local LAST_ERR=/tmp/tmp.A5JYLtE68O ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb cross-site-sharded-replica -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 1 ']' ++ break ++ cat /tmp/tmp.J2reGktA2B ++ cat /tmp/tmp.A5JYLtE68O ++ rm /tmp/tmp.J2reGktA2B /tmp/tmp.A5JYLtE68O ++ return 0 + [[ ready == \r\e\a\d\y ]] + echo + destroy cross-site-sharded-30230 true + local namespace=cross-site-sharded-30230 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.MFM8bHjJ3F ++ mktemp + local LAST_ERR=/tmp/tmp.QEo2ZL1hdi + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + 
kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.MFM8bHjJ3F customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted + cat /tmp/tmp.QEo2ZL1hdi + rm /tmp/tmp.MFM8bHjJ3F /tmp/tmp.QEo2ZL1hdi + return 0 ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/crd.yaml ++ grep -v '\-\-\-' + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.zL547hUFMe ++ mktemp + local LAST_ERR=/tmp/tmp.jGxl8JgWOY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.zL547hUFMe + cat /tmp/tmp.jGxl8JgWOY + rm /tmp/tmp.zL547hUFMe /tmp/tmp.jGxl8JgWOY + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.UzZRtAD6Q2 ++ mktemp + local LAST_ERR=/tmp/tmp.UpxPgfpOda + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.UzZRtAD6Q2 + cat /tmp/tmp.UpxPgfpOda + rm /tmp/tmp.UzZRtAD6Q2 /tmp/tmp.UpxPgfpOda + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource 
type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.le4yNkOeDb ++ mktemp + local LAST_ERR=/tmp/tmp.VX9xsN1f6n + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.le4yNkOeDb + cat /tmp/tmp.VX9xsN1f6n + rm /tmp/tmp.le4yNkOeDb /tmp/tmp.VX9xsN1f6n + return 0 + local rbac_yaml=rbac.yaml + '[' -n '' ']' + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.85j7ZpEvGg ++ mktemp + local LAST_ERR=/tmp/tmp.wgSJbsFFCA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.85j7ZpEvGg role.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted serviceaccount "percona-server-mongodb-operator" deleted rolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted + cat /tmp/tmp.wgSJbsFFCA + rm /tmp/tmp.85j7ZpEvGg /tmp/tmp.wgSJbsFFCA + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.54a7pZaXGN ++ mktemp + local LAST_ERR=/tmp/tmp.fdymQiShQg + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.54a7pZaXGN + cat /tmp/tmp.fdymQiShQg Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.54a7pZaXGN + cat /tmp/tmp.fdymQiShQg Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": 
serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": 
clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io 
"cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 4 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.54a7pZaXGN + cat /tmp/tmp.fdymQiShQg Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 8 + cat /tmp/tmp.54a7pZaXGN + cat /tmp/tmp.fdymQiShQg Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error 
from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": 
clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + rm /tmp/tmp.54a7pZaXGN /tmp/tmp.fdymQiShQg + return 1 + true + '[' -n '' ']' + '[' -n '' ']' + rm -rf /tmp/tmp.rd0HZlsLU2 + kubectl_bin delete --grace-period=0 --force=true namespace cross-site-sharded-30230 ++ mktemp + local LAST_OUT=/tmp/tmp.PPUe7e0d6Y ++ mktemp + local LAST_ERR=/tmp/tmp.BE5qm3aCNU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace cross-site-sharded-30230 + destroy cross-site-sharded-replica-9589 true + local namespace=cross-site-sharded-replica-9589 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false ']' + delete_crd + desc 'get and delete old CRDs and RBAC' + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/crd.yaml --ignore-not-found --wait=false ++ mktemp + local LAST_OUT=/tmp/tmp.XlM8DWKswt ++ mktemp + local LAST_ERR=/tmp/tmp.0EBfGlYYKt + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/crd.yaml --ignore-not-found --wait=false + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.XlM8DWKswt + cat /tmp/tmp.0EBfGlYYKt + rm /tmp/tmp.XlM8DWKswt /tmp/tmp.0EBfGlYYKt + return 0 ++ grep -v '\-\-\-' ++ yq eval .metadata.name /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/crd.yaml + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + kubectl get perconaservermongodbbackups.psmdb.percona.com --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbbackups.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" + : + kubectl_bin wait --for=delete crd 
perconaservermongodbbackups.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.wHN4Bg25Jh ++ mktemp + local LAST_ERR=/tmp/tmp.gaIRCVmyfO + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbbackups.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.wHN4Bg25Jh + cat /tmp/tmp.gaIRCVmyfO + rm /tmp/tmp.wHN4Bg25Jh /tmp/tmp.gaIRCVmyfO + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + kubectl get perconaservermongodbrestores.psmdb.percona.com --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbrestores.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" + : + kubectl_bin wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.LW4pI4K8NI ++ mktemp + local LAST_ERR=/tmp/tmp.IVlnOeNLtQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbrestores.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.LW4pI4K8NI + cat /tmp/tmp.IVlnOeNLtQ + rm /tmp/tmp.LW4pI4K8NI /tmp/tmp.IVlnOeNLtQ + return 0 + for crd_name in '$(yq eval '\''.metadata.name'\'' "${src_dir}/deploy/crd.yaml" | grep -v '\''\-\-\-'\'')' + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch perconaservermongodbs.psmdb.percona.com -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get perconaservermongodbs.psmdb.percona.com --all-namespaces -o wide error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" + : + kubectl_bin wait --for=delete crd perconaservermongodbs.psmdb.percona.com ++ mktemp + local LAST_OUT=/tmp/tmp.F5PqdPVEhZ ++ mktemp + local LAST_ERR=/tmp/tmp.XQthpv3qcA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=delete crd perconaservermongodbs.psmdb.percona.com + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.F5PqdPVEhZ + cat /tmp/tmp.XQthpv3qcA + rm /tmp/tmp.F5PqdPVEhZ /tmp/tmp.XQthpv3qcA + return 0 + local rbac_yaml=rbac.yaml + '[' -n '' ']' + kubectl_bin delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/rbac.yaml --ignore-not-found ++ mktemp + local LAST_OUT=/tmp/tmp.q2gULekuSL ++ mktemp + local LAST_ERR=/tmp/tmp.z4hz07g9IA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1608/deploy/rbac.yaml --ignore-not-found + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.q2gULekuSL + cat /tmp/tmp.z4hz07g9IA + rm /tmp/tmp.q2gULekuSL /tmp/tmp.z4hz07g9IA + return 0 + destroy_cert_manager + kubectl_bin delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.n0yl4Jn3au ++ mktemp + local LAST_ERR=/tmp/tmp.BHEzposSZL + 
local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 1 ']' + break + cat /tmp/tmp.PPUe7e0d6Y namespace "cross-site-sharded-30230" force deleted + cat /tmp/tmp.BE5qm3aCNU Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. + rm /tmp/tmp.PPUe7e0d6Y /tmp/tmp.BE5qm3aCNU + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.n0yl4Jn3au + cat /tmp/tmp.BHEzposSZL Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": 
clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io 
"cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.n0yl4Jn3au + cat /tmp/tmp.BHEzposSZL Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not 
found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + sleep 4 + for i in '$(seq 0 2)' + set +e + kubectl delete -f https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml + exit_status=1 + set -e + '[' 1 '!=' 0 -a -n 1 ']' + cat /tmp/tmp.n0yl4Jn3au + cat /tmp/tmp.BHEzposSZL Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting 
"https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": 
clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io 
"cert-manager-webhook" not found + sleep 8 + cat /tmp/tmp.n0yl4Jn3au + cat /tmp/tmp.BHEzposSZL Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": namespaces "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificaterequests.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "certificates.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "challenges.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "clusterissuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "issuers.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": customresourcedefinitions.apiextensions.k8s.io "orders.acme.cert-manager.io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": serviceaccounts "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io 
"cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-cluster-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-view" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-edit" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterroles.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-issuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificates" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-orders" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-challenges" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" not found Error from server (NotFound): 
error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": clusterrolebindings.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": roles.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager:leaderelection" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": rolebindings.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": services "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-cainjector" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": deployments.apps "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": mutatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found Error from server (NotFound): error when deleting "https://github.com/cert-manager/cert-manager/releases/download/v1.15.3/cert-manager.yaml": validatingwebhookconfigurations.admissionregistration.k8s.io "cert-manager-webhook" not found + rm /tmp/tmp.n0yl4Jn3au /tmp/tmp.BHEzposSZL + return 1 + true + '[' -n '' ']' + '[' -n '' ']' + rm -rf /tmp/tmp.rd0HZlsLU2 + kubectl_bin delete --grace-period=0 --force=true namespace cross-site-sharded-replica-9589 ++ mktemp + local LAST_OUT=/tmp/tmp.ryF3I4MBc7 ++ mktemp + local LAST_ERR=/tmp/tmp.37fzFQxfpd + local exit_status=0 + local 
timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace cross-site-sharded-replica-9589 + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed -----------------------------------------------------------------------------------
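
The teardown trace above repeats two shell patterns that are easier to read outside the xtrace noise: a retry wrapper that captures kubectl output to temp files, retries up to three times with increasing sleeps (0s, 4s, 8s), prints whatever was captured, and propagates the last exit code; and a finalizer-clearing pass that patches lingering custom resources before waiting for their CRD to disappear. The sketch below is a minimal reconstruction of those patterns under the same assumptions, not the actual e2e helper source; the function names (retry_kubectl, clear_crd) are illustrative.

#!/bin/bash
set -o errexit

# Illustrative retry wrapper (hypothetical name): capture stdout/stderr,
# retry up to three times with 0/4/8-second back-off, then print the
# captured output and return the last exit code.
retry_kubectl() {
    local out err rc i
    out=$(mktemp)
    err=$(mktemp)
    rc=0
    for i in 0 1 2; do
        set +o errexit
        kubectl "$@" >"$out" 2>"$err"
        rc=$?
        set -o errexit
        [ "$rc" -eq 0 ] && break
        sleep $((i * 4))
    done
    cat "$out" "$err"
    rm -f "$out" "$err"
    return "$rc"
}

# Finalizer-clearing pass, as exercised in the trace: for every remaining
# custom resource of the given CRD, drop its finalizers so deletion can
# complete, then wait for the CRD itself to be gone. Failures are tolerated
# because the CRD may already be absent ("the server doesn't have a
# resource type ...").
clear_crd() {
    local crd=$1
    kubectl get "$crd" --all-namespaces -o wide \
        | grep -v NAMESPACE \
        | xargs -L 1 sh -xc "kubectl patch $crd -n \$0 \$1 --type=merge -p '{\"metadata\":{\"finalizers\":[]}}'" \
        || true
    retry_kubectl wait --for=delete crd "$crd" || true
}

# Example, matching the CRDs cleaned up above:
# clear_crd perconaservermongodbbackups.psmdb.percona.com
# clear_crd perconaservermongodbrestores.psmdb.percona.com
# clear_crd perconaservermongodbs.psmdb.percona.com

The tolerant error handling ("|| true" above, "+ :" and "+ return 1 + true" in the trace) is deliberate: during teardown every object may already be gone, so NotFound errors from cert-manager or the operator CRDs are expected and must not fail the run.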