++ echo 'Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1541/e2e-tests/logs/demand-backup-sharded.log'
Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1541/e2e-tests/logs/demand-backup-sharded.log
++ '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1541/e2e-tests/conf/cloud-secret.yml ']'
++ SKIP_BACKUPS_TO_AWS_GCP_AZURE=
++ oc get projects
++ kubectl get nodes
++ grep '^minikube'
+++ kubectl version -o json
+++ jq -r .serverVersion.gitVersion
+++ grep '\-eks\-'
WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1
++ '[' ']'
++ EKS=0
+++ kubectl version -o json
+++ jq -r .serverVersion.gitVersion
+++ grep gke
WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1
++ '[' v1.26.15-gke.1243000 ']'
++ GKE=1
+++ kubectl version -o json
+++ jq -r '.serverVersion.major + "." + .serverVersion.minor'
+++ /usr/bin/sed -r 's/[^0-9.]+//g'
WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1
++ KUBE_VERSION=1.26
+ set_debug
+ [[ 0 == 1 ]]
+ set +o xtrace
-----------------------------------------------------------------------------------
get and delete old CRDs and RBAC
-----------------------------------------------------------------------------------
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbbackups"
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbrestores"
error: the server doesn't have a resource type "perconaservermongodbs"
+ kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbs"
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
-----------------------------------------------------------------------------------
destroy chaos-mesh
-----------------------------------------------------------------------------------
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
cleaned up old namespaces psmdb-operator
-----------------------------------------------------------------------------------
error: resource(s) were provided, but no name was specified
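The cleanup phase above amounts to stripping finalizers from any leftover PSMDB custom resources and then deleting the operator CRDs themselves (the "resource type" and "not found" errors are expected on a cluster that is already clean). A minimal sketch of that idea, not the suite's exact cleanup helper:

for crd in perconaservermongodbbackups.psmdb.percona.com \
           perconaservermongodbrestores.psmdb.percona.com \
           perconaservermongodbs.psmdb.percona.com; do
    # Clear finalizers on any leftover custom resources so deletion cannot hang.
    kubectl get "$crd" --all-namespaces \
        -o jsonpath='{range .items[*]}{.metadata.namespace} {.metadata.name}{"\n"}{end}' 2>/dev/null \
    | while read -r ns name; do
        kubectl patch "$crd" "$name" -n "$ns" --type=merge -p '{"metadata":{"finalizers":[]}}'
    done
    # Then drop the CRD itself; ignore the case where it was never installed.
    kubectl delete crd "$crd" --ignore-not-found
done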
-----------------------------------------------------------------------------------
create namespace psmdb-operator
-----------------------------------------------------------------------------------
namespace/psmdb-operator created
Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1541-0dd139a1-1-cluster2" modified.
-----------------------------------------------------------------------------------
start PSMDB operator
-----------------------------------------------------------------------------------
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied
clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created
serviceaccount/percona-server-mongodb-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created
deployment.apps/percona-server-mongodb-operator created
waiting for pod/percona-server-mongodb-operator-685687d74f-zcb76 to be ready.OK
-----------------------------------------------------------------------------------
destroy chaos-mesh
-----------------------------------------------------------------------------------
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
cleaned up old namespaces demand-backup-sharded-19394
-----------------------------------------------------------------------------------
error: resource(s) were provided, but no name was specified
-----------------------------------------------------------------------------------
create namespace demand-backup-sharded-19394
-----------------------------------------------------------------------------------
namespace/demand-backup-sharded-19394 created
Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1541-0dd139a1-1-cluster2" modified.
-----------------------------------------------------------------------------------
install Minio
-----------------------------------------------------------------------------------
Error: uninstall: Release not loaded: minio-service: release: not found
"minio" has been removed from your repositories
"minio" has been added to your repositories
NAME: minio-service
LAST DEPLOYED: Mon May 6 02:26:47 2024
NAMESPACE: demand-backup-sharded-19394
STATUS: deployed
REVISION: 1
TEST SUITE: None
NOTES:
MinIO can be accessed via port 9000 on the following DNS name from within your cluster:
minio-service.demand-backup-sharded-19394.svc.cluster.local

To access MinIO from localhost, run the below commands:

  1. export POD_NAME=$(kubectl get pods --namespace demand-backup-sharded-19394 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}")
  2.
     kubectl port-forward $POD_NAME 9000 --namespace demand-backup-sharded-19394

Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/

You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client:

  1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart
  2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace demand-backup-sharded-19394 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace demand-backup-sharded-19394 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000
  3. mc ls minio-service-local
waiting for pod/minio-service-57dd49b-nnncz to be ready.OK
service/minio-service created
make_bucket: operator-testing
pod "aws-cli" deleted
If you don't see a command prompt, try pressing enter.
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-sharded-19394
-----------------------------------------------------------------------------------
create first PSMDB cluster
-----------------------------------------------------------------------------------
secret/some-users created
deployment.apps/psmdb-client created
-----------------------------------------------------------------------------------
create secrets for cloud storages
-----------------------------------------------------------------------------------
secret/minio-secret created
secret/aws-s3-secret created
secret/gcp-cs-secret created
secret/azure-secret created
runtimeclass.node.k8s.io/container-rc unchanged
perconaservermongodb.psmdb.percona.com/some-name created
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
waiting for pod/some-name-rs0-0 to be ready..........OK
waiting for pod/some-name-rs0-1 to be ready........OK
waiting for pod/some-name-rs0-2 to be ready.....OK
Waiting for cluster readyness............................................
waiting for pod/some-name-cfg-0 to be ready.OK
waiting for pod/some-name-cfg-1 to be ready.OK
waiting for pod/some-name-cfg-2 to be ready.OK
waiting for pod/some-name-mongos-0 to be ready.OK
waiting for pod/some-name-mongos-1 to be ready.OK
waiting for pod/some-name-mongos-2 to be ready.OK
Waiting for cluster readyness
-----------------------------------------------------------------------------------
check if service and statefulset created with expected config
-----------------------------------------------------------------------------------
secret/some-name-mongos created
waiting for pod/some-name-mongos-0 to be ready.OK
waiting for pod/some-name-mongos-1 to be ready.OK
waiting for pod/some-name-mongos-2 to be ready.OK
Waiting for cluster readyness..............................................................................................
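The "waiting for pod ... to be ready" and "Waiting for cluster readyness" messages above come from polling loops in the test helpers. A rough equivalent, assuming the psmdb short name and the .status.state field that this same log queries later (this is a sketch, not the suite's wait_pod/wait_cluster_consistency functions verbatim):

# Wait for a pod's Ready condition (the log prints one dot per poll iteration).
kubectl wait --for=condition=Ready pod/some-name-mongos-0 \
    --namespace demand-backup-sharded-19394 --timeout=300s

# Poll the PerconaServerMongoDB resource until the operator reports it ready.
until [ "$(kubectl get psmdb some-name -o jsonpath='{.status.state}')" = "ready" ]; do
    echo -n .
    sleep 10
done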
-----------------------------------------------------------------------------------
write data, read from all
-----------------------------------------------------------------------------------
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb
Implicit session: session { "id" : UUID("8ac5f0a4-6736-4ef5-a111-7882052f47c7") }
Percona Server for MongoDB server version: v7.0.8-5
WARNING: shell and server versions do not match
Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" }, { "db" : "myApp1", "role" : "readWrite" }, { "db" : "myApp2", "role" : "readWrite" } ] }
bye
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb
Implicit session: session { "id" : UUID("c1813eb6-f854-4fa6-b449-72ab0fa6d359") }
Percona Server for MongoDB server version: v7.0.8-5
WARNING: shell and server versions do not match
{ "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1714962929, 11), "signature" : { "hash" : BinData(0,"W7kkYlxHilzCn830X/CR+JCuiu4="), "keyId" : NumberLong("7365707950150647831") } }, "operationTime" : Timestamp(1714962929, 5) }
bye
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb
Implicit session: session { "id" : UUID("d3d7e969-056f-4d6a-a5e4-242608a7b8bd") }
Percona Server for MongoDB server version: v7.0.8-5
WARNING: shell and server versions do not match
{ "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1714962933, 10), "signature" : { "hash" : BinData(0,"+lWFvvU5Bkt6DqI2ugIIh47JXa0="), "keyId" : NumberLong("7365707950150647831") } }, "operationTime" : Timestamp(1714962933, 5) }
bye
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb
Implicit session: session { "id" : UUID("3e55c6d2-04c4-4182-bd57-80821f8526b4") }
Percona Server for MongoDB server version: v7.0.8-5
WARNING: shell and server versions do not match
{ "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1714962937, 9), "signature" : { "hash" : BinData(0,"0bk7HIivWkWoJaSOJXBpGYEo9nk="), "keyId" : NumberLong("7365707950150647831") } }, "operationTime" : Timestamp(1714962937, 4) }
bye
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb
Implicit session: session { "id" : UUID("945f1fca-9640-4a93-8efb-fea0453dd5e7") }
Percona Server for MongoDB server version: v7.0.8-5
WARNING: shell and server versions do not match
switched to db myApp
WriteResult({ "nInserted" : 1 })
bye
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb
Implicit session: session { "id" : UUID("fb0c4db4-2ea3-488e-abea-75180e248406") }
Percona Server for MongoDB server version: v7.0.8-5
WARNING: shell and server versions do not match
switched to db myApp1
WriteResult({ "nInserted" : 1 })
bye
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb
Implicit session: session { "id" : UUID("0871a2c3-9f21-446e-9031-83f1578ea752") }
Percona Server for MongoDB server version: v7.0.8-5
WARNING: shell and server versions do not match
switched to db myApp2
WriteResult({ "nInserted" : 1 })
bye
some-name-rs0-0
some-name-rs0-1
some-name-rs0-2
some-name-rs1-0
some-name-rs1-1
some-name-rs1-2
some-name-rs2-0
some-name-rs2-1
some-name-rs2-2
-----------------------------------------------------------------------------------
run backups
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
run backup backup-minio
-----------------------------------------------------------------------------------
perconaservermongodbbackup.psmdb.percona.com/backup-minio created
-----------------------------------------------------------------------------------
run backup backup-aws-s3
-----------------------------------------------------------------------------------
perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created
-----------------------------------------------------------------------------------
run backup backup-gcp-cs
-----------------------------------------------------------------------------------
perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs created
-----------------------------------------------------------------------------------
run backup backup-azure-blob
-----------------------------------------------------------------------------------
perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created
backup-aws-s3....................................
backup-gcp-cs......................
backup-azure-blob...................................
backup-minio.
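Each on-demand backup above is a PerconaServerMongoDBBackup custom resource, and the trailing dots after each backup name are a poll on its state. An approximate wait loop, assuming the backup object's .status.state reaches "ready" on success (a sketch only, not the suite's wait_backup helper):

for backup in backup-aws-s3 backup-gcp-cs backup-azure-blob backup-minio; do
    # One dot per poll, mirroring the log output above.
    until [ "$(kubectl get perconaservermongodbbackups.psmdb.percona.com "$backup" \
            -o jsonpath='{.status.state}')" = "ready" ]; do
        echo -n .
        sleep 5
    done
    echo " $backup: ready"
done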
----------------------------------------------------------------------------------- check backup and restore -- aws-s3 ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("bee8e102-e85e-4716-a471-ae1a02ace5e7") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("8b9425fe-2e47-4f5d-be42-b8448dbfc044") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("3c023a26-168d-401e-baa2-1da1641a7caf") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-aws-s3 created waiting psmdb-restore/backup-aws-s3 to reach ready state............ + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WAtiLjgxUz +++ mktemp ++ local LAST_ERR=/tmp/tmp.2Sxy9yA0qJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.WAtiLjgxUz ++ cat /tmp/tmp.2Sxy9yA0qJ ++ rm /tmp/tmp.WAtiLjgxUz /tmp/tmp.2Sxy9yA0qJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yUCBzpCrkO +++ mktemp ++ local LAST_ERR=/tmp/tmp.U8tn3AG5JI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.yUCBzpCrkO ++ cat /tmp/tmp.U8tn3AG5JI ++ rm /tmp/tmp.yUCBzpCrkO /tmp/tmp.U8tn3AG5JI ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.auBbNU1I2Q +++ mktemp ++ local LAST_ERR=/tmp/tmp.uHhNBWNOki ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.auBbNU1I2Q ++ cat /tmp/tmp.uHhNBWNOki ++ rm /tmp/tmp.auBbNU1I2Q /tmp/tmp.uHhNBWNOki ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6Xx6Jib5eY +++ mktemp ++ local LAST_ERR=/tmp/tmp.TFvMz4TNA5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.6Xx6Jib5eY ++ cat /tmp/tmp.TFvMz4TNA5 ++ rm /tmp/tmp.6Xx6Jib5eY /tmp/tmp.TFvMz4TNA5 ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KwRsmr184z +++ mktemp ++ local LAST_ERR=/tmp/tmp.rsUs8EK8eA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.KwRsmr184z ++ cat /tmp/tmp.rsUs8EK8eA ++ rm /tmp/tmp.KwRsmr184z /tmp/tmp.rsUs8EK8eA ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EUcJSE5YFB +++ mktemp ++ local LAST_ERR=/tmp/tmp.oiMeuHDXNp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.EUcJSE5YFB ++ cat /tmp/tmp.oiMeuHDXNp ++ rm /tmp/tmp.EUcJSE5YFB /tmp/tmp.oiMeuHDXNp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Vypy8Q5bjQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Tpl9T6f6Ck ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.Vypy8Q5bjQ ++ cat /tmp/tmp.Tpl9T6f6Ck ++ rm /tmp/tmp.Vypy8Q5bjQ /tmp/tmp.Tpl9T6f6Ck ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.90GgzZompW +++ mktemp ++ local LAST_ERR=/tmp/tmp.FYDRqBTrQn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.90GgzZompW ++ cat /tmp/tmp.FYDRqBTrQn ++ rm /tmp/tmp.90GgzZompW /tmp/tmp.FYDRqBTrQn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YYoyzuuM6E +++ mktemp ++ local LAST_ERR=/tmp/tmp.KF6vjIJoti ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.YYoyzuuM6E ++ cat /tmp/tmp.KF6vjIJoti ++ rm /tmp/tmp.YYoyzuuM6E /tmp/tmp.KF6vjIJoti ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-19394 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-19394 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gaQsmrGgTb +++ mktemp ++ local LAST_ERR=/tmp/tmp.wRCMxavf9i ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.gaQsmrGgTb ++ cat /tmp/tmp.wRCMxavf9i ++ rm /tmp/tmp.gaQsmrGgTb /tmp/tmp.wRCMxavf9i ++ return 0 + local client_container=psmdb-client-5dc94d5b48-r52d7 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.TQVlxDfFxZ ++ mktemp + local LAST_ERR=/tmp/tmp.iB7fjh4c7U + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.TQVlxDfFxZ + cat /tmp/tmp.iB7fjh4c7U + rm /tmp/tmp.TQVlxDfFxZ /tmp/tmp.iB7fjh4c7U + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1541/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.YPIatqXVQC/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-19394 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-19394 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local 
uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pqjvpvhpRT +++ mktemp ++ local LAST_ERR=/tmp/tmp.g4BtAc6TjT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.pqjvpvhpRT ++ cat /tmp/tmp.g4BtAc6TjT ++ rm /tmp/tmp.pqjvpvhpRT /tmp/tmp.g4BtAc6TjT ++ return 0 + local client_container=psmdb-client-5dc94d5b48-r52d7 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.8aY3X3ErVX ++ mktemp + local LAST_ERR=/tmp/tmp.V2TnH8ymRo + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.8aY3X3ErVX + cat /tmp/tmp.V2TnH8ymRo + rm /tmp/tmp.8aY3X3ErVX /tmp/tmp.V2TnH8ymRo + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1541/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.YPIatqXVQC/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-19394 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-19394 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vIrvnjc77F +++ mktemp ++ local LAST_ERR=/tmp/tmp.F4RgO9uDU1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.vIrvnjc77F ++ cat /tmp/tmp.F4RgO9uDU1 ++ rm /tmp/tmp.vIrvnjc77F /tmp/tmp.F4RgO9uDU1 ++ return 0 + local client_container=psmdb-client-5dc94d5b48-r52d7 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.NwzHaTOha4 ++ mktemp + local LAST_ERR=/tmp/tmp.ldkM94aRGS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.NwzHaTOha4 + cat /tmp/tmp.ldkM94aRGS + rm /tmp/tmp.NwzHaTOha4 /tmp/tmp.ldkM94aRGS + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1541/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.YPIatqXVQC/find2 + desc 'check backup and restore -- gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- gcp-cs ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("c137f859-823c-4283-85ac-533b3f9e25ce") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("2e0a719c-75ca-4dd2-986e-5387b583d03b") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("6c11a012-b2d4-46c7-857e-2bc98928edad") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-gcp-cs created waiting psmdb-restore/backup-gcp-cs to reach ready state........... + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gWk4tGTzp8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.yQB8Gq4kRy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.gWk4tGTzp8 ++ cat /tmp/tmp.yQB8Gq4kRy ++ rm /tmp/tmp.gWk4tGTzp8 /tmp/tmp.yQB8Gq4kRy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VviLcbooCK +++ mktemp ++ local LAST_ERR=/tmp/tmp.sug9FOfIqW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.VviLcbooCK ++ cat /tmp/tmp.sug9FOfIqW ++ rm /tmp/tmp.VviLcbooCK /tmp/tmp.sug9FOfIqW ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Mkg5C012oS +++ mktemp ++ local LAST_ERR=/tmp/tmp.AV98c3tILe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.Mkg5C012oS ++ cat /tmp/tmp.AV98c3tILe ++ rm /tmp/tmp.Mkg5C012oS /tmp/tmp.AV98c3tILe ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iVz2S6wXGZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.WgRng5yAeY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.iVz2S6wXGZ ++ cat /tmp/tmp.WgRng5yAeY ++ rm /tmp/tmp.iVz2S6wXGZ /tmp/tmp.WgRng5yAeY ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Tke3G5RehG +++ mktemp ++ local LAST_ERR=/tmp/tmp.PbGnzrQSY9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.Tke3G5RehG ++ cat /tmp/tmp.PbGnzrQSY9 ++ rm /tmp/tmp.Tke3G5RehG /tmp/tmp.PbGnzrQSY9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gfcDoNx5y1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.MU3OzZ1yfF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.gfcDoNx5y1 ++ cat /tmp/tmp.MU3OzZ1yfF ++ rm /tmp/tmp.gfcDoNx5y1 /tmp/tmp.MU3OzZ1yfF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.T9mXvLL8pP +++ mktemp ++ local LAST_ERR=/tmp/tmp.evEpi7FFVJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.T9mXvLL8pP ++ cat /tmp/tmp.evEpi7FFVJ ++ rm /tmp/tmp.T9mXvLL8pP /tmp/tmp.evEpi7FFVJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EMlsGFfmcR +++ mktemp ++ local LAST_ERR=/tmp/tmp.xiKGJYen0c ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.EMlsGFfmcR ++ cat /tmp/tmp.xiKGJYen0c ++ rm /tmp/tmp.EMlsGFfmcR /tmp/tmp.xiKGJYen0c ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-19394 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-19394 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gnviAjpwAb +++ mktemp ++ local LAST_ERR=/tmp/tmp.kOIfP2XyQX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.gnviAjpwAb ++ cat /tmp/tmp.kOIfP2XyQX ++ rm /tmp/tmp.gnviAjpwAb /tmp/tmp.kOIfP2XyQX ++ return 0 + local client_container=psmdb-client-5dc94d5b48-r52d7 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.8iJ31NgTBp ++ mktemp + local LAST_ERR=/tmp/tmp.BV0KISD20S + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.8iJ31NgTBp + cat /tmp/tmp.BV0KISD20S + rm /tmp/tmp.8iJ31NgTBp /tmp/tmp.BV0KISD20S + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1541/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.YPIatqXVQC/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-19394 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-19394 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local 
uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ local LAST_OUT=/tmp/tmp.49mpFjXs0P +++ mktemp ++ local LAST_ERR=/tmp/tmp.S74XXWjF1S ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.49mpFjXs0P ++ cat /tmp/tmp.S74XXWjF1S ++ rm /tmp/tmp.49mpFjXs0P /tmp/tmp.S74XXWjF1S ++ return 0 + local client_container=psmdb-client-5dc94d5b48-r52d7 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.d1YZNk7uKC ++ mktemp + local LAST_ERR=/tmp/tmp.hOGEP9TWLv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.d1YZNk7uKC + cat /tmp/tmp.hOGEP9TWLv + rm /tmp/tmp.d1YZNk7uKC /tmp/tmp.hOGEP9TWLv + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1541/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.YPIatqXVQC/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-19394 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-19394 mongodb .svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ngwX3uC6Qz +++ mktemp ++ local LAST_ERR=/tmp/tmp.IpSATJLjgE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.ngwX3uC6Qz ++ cat /tmp/tmp.IpSATJLjgE ++ rm /tmp/tmp.ngwX3uC6Qz /tmp/tmp.IpSATJLjgE ++ return 0 + local client_container=psmdb-client-5dc94d5b48-r52d7 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.tGbaZeBY0B ++ mktemp + local LAST_ERR=/tmp/tmp.WnU1NrmiDW + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.tGbaZeBY0B + cat /tmp/tmp.WnU1NrmiDW + rm /tmp/tmp.tGbaZeBY0B /tmp/tmp.WnU1NrmiDW + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1541/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.YPIatqXVQC/find2 + desc 'check backup and restore -- azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- azure-blob ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("f01cfffa-a0d9-43f7-a625-3eec847375eb") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("01fd0afd-8f8e-4b15-992a-379923e7f50e") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("1a881a41-62eb-445a-a2f1-8f845c1feb28") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-azure-blob created waiting psmdb-restore/backup-azure-blob to reach ready state............ + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FzBX7mH6cH +++ mktemp ++ local LAST_ERR=/tmp/tmp.DuifoA3uzL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.FzBX7mH6cH ++ cat /tmp/tmp.DuifoA3uzL ++ rm /tmp/tmp.FzBX7mH6cH /tmp/tmp.DuifoA3uzL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Q6yiaIYMH7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.oL6z9eAlDm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.Q6yiaIYMH7 ++ cat /tmp/tmp.oL6z9eAlDm ++ rm /tmp/tmp.Q6yiaIYMH7 /tmp/tmp.oL6z9eAlDm ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dlsADZdreD +++ mktemp ++ local LAST_ERR=/tmp/tmp.kYLwPqRqJn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.dlsADZdreD ++ cat /tmp/tmp.kYLwPqRqJn ++ rm /tmp/tmp.dlsADZdreD /tmp/tmp.kYLwPqRqJn ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Jvml1S9a2q +++ mktemp ++ local LAST_ERR=/tmp/tmp.imHqFUWbje ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.Jvml1S9a2q ++ cat /tmp/tmp.imHqFUWbje ++ rm /tmp/tmp.Jvml1S9a2q /tmp/tmp.imHqFUWbje ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DtToeCrxXu +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hgr6B5kSoa ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.DtToeCrxXu ++ cat /tmp/tmp.Hgr6B5kSoa ++ rm /tmp/tmp.DtToeCrxXu /tmp/tmp.Hgr6B5kSoa ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XplQME6CIN +++ mktemp ++ local LAST_ERR=/tmp/tmp.cPajJDUTkD ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.XplQME6CIN ++ cat /tmp/tmp.cPajJDUTkD ++ rm /tmp/tmp.XplQME6CIN /tmp/tmp.cPajJDUTkD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Xxa4BSFiyJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hv5hq5GlB5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.Xxa4BSFiyJ ++ cat /tmp/tmp.Hv5hq5GlB5 ++ rm /tmp/tmp.Xxa4BSFiyJ /tmp/tmp.Hv5hq5GlB5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OcVkdIQU2M +++ mktemp ++ local LAST_ERR=/tmp/tmp.mzeEPudwpp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.OcVkdIQU2M ++ cat /tmp/tmp.mzeEPudwpp ++ rm /tmp/tmp.OcVkdIQU2M /tmp/tmp.mzeEPudwpp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AgoNpCPXne +++ mktemp ++ local LAST_ERR=/tmp/tmp.TRvgUl2Vqd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.AgoNpCPXne ++ cat /tmp/tmp.TRvgUl2Vqd ++ rm /tmp/tmp.AgoNpCPXne /tmp/tmp.TRvgUl2Vqd ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-19394 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-19394 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.F5pYp32t8K +++ mktemp ++ local LAST_ERR=/tmp/tmp.qz9CkxKRFI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.F5pYp32t8K ++ cat /tmp/tmp.qz9CkxKRFI ++ rm /tmp/tmp.F5pYp32t8K /tmp/tmp.qz9CkxKRFI ++ return 0 + local client_container=psmdb-client-5dc94d5b48-r52d7 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.rk8IhGkjfr ++ mktemp + local LAST_ERR=/tmp/tmp.lF1523zQoz + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.rk8IhGkjfr + cat /tmp/tmp.lF1523zQoz + rm /tmp/tmp.rk8IhGkjfr /tmp/tmp.lF1523zQoz + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1541/e2e-tests/demand-backup-sharded/compare/find.json 
/tmp/tmp.YPIatqXVQC/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-19394 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-19394 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zoNRnQTJKG +++ mktemp ++ local LAST_ERR=/tmp/tmp.RdripCE8nH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.zoNRnQTJKG ++ cat /tmp/tmp.RdripCE8nH ++ rm /tmp/tmp.zoNRnQTJKG /tmp/tmp.RdripCE8nH ++ return 0 + local client_container=psmdb-client-5dc94d5b48-r52d7 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.dLfBIngtJY ++ mktemp + local LAST_ERR=/tmp/tmp.qM5DyI4RBC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.dLfBIngtJY + cat /tmp/tmp.qM5DyI4RBC + rm /tmp/tmp.dLfBIngtJY /tmp/tmp.qM5DyI4RBC + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1541/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.YPIatqXVQC/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-19394 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-19394 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jplzjaw6Fz +++ mktemp ++ local LAST_ERR=/tmp/tmp.bpntnB48Jt ++ local 
exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.jplzjaw6Fz ++ cat /tmp/tmp.bpntnB48Jt ++ rm /tmp/tmp.jplzjaw6Fz /tmp/tmp.bpntnB48Jt ++ return 0 + local client_container=psmdb-client-5dc94d5b48-r52d7 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.c74YN1OQLi ++ mktemp + local LAST_ERR=/tmp/tmp.a8JXD6vDKv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.c74YN1OQLi + cat /tmp/tmp.a8JXD6vDKv + rm /tmp/tmp.c74YN1OQLi /tmp/tmp.a8JXD6vDKv + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1541/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.YPIatqXVQC/find2 + desc 'check backup and restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- minio ----------------------------------------------------------------------------------- If you don't see a command prompt, try pressing enter. warning: couldn't attach to pod/aws-cli, falling back to streaming logs: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-sharded-19394 2024-05-06 02:36:32 55 myApp.test.gz Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("00424156-66f7-4a7f-b62f-ddfda63ae46e") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("d45b3764-78ba-4062-addd-ddb2afabd667") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-19394.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("ee233413-b24f-4c50-b6fe-d96fc3fefca1") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio created waiting psmdb-restore/backup-minio to reach ready state............ 
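Each restore above follows the same pattern: write one more marker document per database, create a PerconaServerMongoDBRestore that references the backup, wait for the restore object to reach the ready state, and then wait for the cluster itself to report ready again before comparing data. A rough sketch of the restore object and its wait; the clusterName/backupName field names are assumptions based on common PSMDB operator examples, not copied from this test's templates:

cat <<'EOF' | kubectl apply -f -
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-minio
spec:
  clusterName: some-name    # assumed field name; older operator versions use psmdbCluster
  backupName: backup-minio
EOF

# Poll the restore object, then hand off to the cluster readiness wait seen below.
until [ "$(kubectl get perconaservermongodbrestores.psmdb.percona.com restore-backup-minio \
        -o jsonpath='{.status.state}')" = "ready" ]; do
    echo -n .
    sleep 5
done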
+ '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v0heGqI8q9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.SgfL31V74r ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.v0heGqI8q9 ++ cat /tmp/tmp.SgfL31V74r ++ rm /tmp/tmp.v0heGqI8q9 /tmp/tmp.SgfL31V74r ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XXE423gR7b +++ mktemp ++ local LAST_ERR=/tmp/tmp.imHHaWGpr5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.XXE423gR7b ++ cat /tmp/tmp.imHHaWGpr5 ++ rm /tmp/tmp.XXE423gR7b /tmp/tmp.imHHaWGpr5 ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QDrhSmemuw +++ mktemp ++ local LAST_ERR=/tmp/tmp.co0OwAc658 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.QDrhSmemuw ++ cat /tmp/tmp.co0OwAc658 ++ rm /tmp/tmp.QDrhSmemuw /tmp/tmp.co0OwAc658 ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yFsDJsW1dd +++ mktemp ++ local LAST_ERR=/tmp/tmp.UtMzbrnao6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.yFsDJsW1dd ++ cat /tmp/tmp.UtMzbrnao6 ++ rm /tmp/tmp.yFsDJsW1dd /tmp/tmp.UtMzbrnao6 ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SzowlpR3aG +++ mktemp ++ local LAST_ERR=/tmp/tmp.el6hjlvPPY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.SzowlpR3aG ++ cat /tmp/tmp.el6hjlvPPY ++ rm /tmp/tmp.SzowlpR3aG /tmp/tmp.el6hjlvPPY ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ChHinRAyMf +++ mktemp ++ local LAST_ERR=/tmp/tmp.Su1xHIS1vY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.ChHinRAyMf ++ cat /tmp/tmp.Su1xHIS1vY ++ rm /tmp/tmp.ChHinRAyMf /tmp/tmp.Su1xHIS1vY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jPLcSLe2ua +++ mktemp ++ local LAST_ERR=/tmp/tmp.KI4mnUHUHV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.jPLcSLe2ua ++ cat /tmp/tmp.KI4mnUHUHV ++ rm /tmp/tmp.jPLcSLe2ua /tmp/tmp.KI4mnUHUHV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pRZzjS78iW +++ mktemp ++ local LAST_ERR=/tmp/tmp.2fXLTThcwm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.pRZzjS78iW ++ cat /tmp/tmp.2fXLTThcwm ++ rm /tmp/tmp.pRZzjS78iW /tmp/tmp.2fXLTThcwm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7gdwm6whPY +++ mktemp ++ local LAST_ERR=/tmp/tmp.t6dlv5kkNm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.7gdwm6whPY ++ cat /tmp/tmp.t6dlv5kkNm ++ rm /tmp/tmp.7gdwm6whPY /tmp/tmp.t6dlv5kkNm ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-19394 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-19394 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BMooHDzKDE +++ mktemp ++ local LAST_ERR=/tmp/tmp.C7Wko0M5Wf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.BMooHDzKDE ++ cat /tmp/tmp.C7Wko0M5Wf ++ rm /tmp/tmp.BMooHDzKDE /tmp/tmp.C7Wko0M5Wf ++ return 0 + local client_container=psmdb-client-5dc94d5b48-r52d7 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.HcOjzLr9eK ++ mktemp + local LAST_ERR=/tmp/tmp.DplXxJ9Cjx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.HcOjzLr9eK + cat /tmp/tmp.DplXxJ9Cjx + rm /tmp/tmp.HcOjzLr9eK /tmp/tmp.DplXxJ9Cjx + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1541/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.YPIatqXVQC/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-19394 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-19394 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EXAzlHQkLj +++ mktemp ++ local LAST_ERR=/tmp/tmp.e7lhL7ZIni ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.EXAzlHQkLj ++ cat /tmp/tmp.e7lhL7ZIni ++ rm /tmp/tmp.EXAzlHQkLj /tmp/tmp.e7lhL7ZIni ++ return 0 + local client_container=psmdb-client-5dc94d5b48-r52d7 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.kRDAa2ko1n ++ mktemp + local LAST_ERR=/tmp/tmp.GbkjsEZ9rV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.kRDAa2ko1n + cat /tmp/tmp.GbkjsEZ9rV + rm /tmp/tmp.kRDAa2ko1n /tmp/tmp.GbkjsEZ9rV + return 0 + diff 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1541/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.YPIatqXVQC/find1
+ for i in '$(seq 0 2)'
+ compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-19394 2 .svc.cluster.local myApp2 test
+ local command=find
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394
+ local postfix=2
+ local suffix=.svc.cluster.local
+ local database=myApp2
+ local collection=test
+ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:'
+ /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
+ run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-19394 mongodb .svc.cluster.local
+ local 'command=use myApp2\n db.test.find()'
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-19394
+ local driver=mongodb
+ local suffix=.svc.cluster.local
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.TE2MYTeqay
+++ mktemp
++ local LAST_ERR=/tmp/tmp.KmjE1URlpE
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.TE2MYTeqay
++ cat /tmp/tmp.KmjE1URlpE
++ rm /tmp/tmp.TE2MYTeqay /tmp/tmp.KmjE1URlpE
++ return 0
+ local client_container=psmdb-client-5dc94d5b48-r52d7
+ local mongo_flag=
+ kubectl_bin exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin '
++ mktemp
+ local LAST_OUT=/tmp/tmp.uaBHHPqAwv
++ mktemp
+ local LAST_ERR=/tmp/tmp.U9bxOu2Lpx
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl exec psmdb-client-5dc94d5b48-r52d7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-19394.svc.cluster.local/admin '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 0 ']'
+ break
+ cat /tmp/tmp.uaBHHPqAwv
+ cat /tmp/tmp.U9bxOu2Lpx
+ rm /tmp/tmp.uaBHHPqAwv /tmp/tmp.U9bxOu2Lpx
+ return 0
+ diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1541/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.YPIatqXVQC/find2
+ desc 'delete backup and check if it is removed from bucket -- minio'
+ set +o xtrace
-----------------------------------------------------------------------------------
delete backup and check if it is removed from bucket -- minio
-----------------------------------------------------------------------------------
perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted
perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted
perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs" deleted
perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted
If you don't see a command prompt, try pressing enter.
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-sharded-19394
-----------------------------------------------------------------------------------
check for passwords leak
-----------------------------------------------------------------------------------
secrets=YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2
passwords=backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2
pods=minio-service-57dd49b-nnncz psmdb-client-5dc94d5b48-r52d7 some-name-cfg-0 some-name-cfg-1 some-name-cfg-2 some-name-mongos-0 some-name-mongos-1 some-name-mongos-2 some-name-rs0-0 some-name-rs0-1 some-name-rs0-2 some-name-rs1-0 some-name-rs1-1 some-name-rs1-2 some-name-rs2-0 some-name-rs2-1 some-name-rs2-2
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-minio-service-57dd49b-nnncz-minio.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-psmdb-client-5dc94d5b48-r52d7-psmdb-client.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-cfg-0-mongod.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-cfg-0-cfg-sidecar-1.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-cfg-0-backup-agent.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-cfg-1-mongod.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-cfg-1-cfg-sidecar-1.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-cfg-1-backup-agent.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-cfg-2-mongod.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-cfg-2-cfg-sidecar-1.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-cfg-2-backup-agent.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-mongos-0-mongos.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-mongos-0-mongos-sidecar-1.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-mongos-1-mongos.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-mongos-1-mongos-sidecar-1.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-mongos-2-mongos.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-mongos-2-mongos-sidecar-1.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs0-0-mongod.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs0-0-backup-agent.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs0-1-mongod.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs0-1-backup-agent.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs0-2-mongod.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs0-2-backup-agent.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs1-0-mongod.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs1-0-backup-agent.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs1-1-mongod.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs1-1-backup-agent.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs1-2-mongod.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs1-2-backup-agent.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs2-0-mongod.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs2-0-rs-sidecar-1.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs2-0-backup-agent.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs2-1-mongod.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs2-1-rs-sidecar-1.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs2-1-backup-agent.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs2-2-mongod.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs2-2-rs-sidecar-1.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-some-name-rs2-2-backup-agent.txt
logs saved in: /tmp/tmp.YPIatqXVQC/logs_output-percona-server-mongodb-operator-685687d74f-zcb76-percona-server-mongodb-operator.txt
runtimeclass.node.k8s.io "container-rc" deleted
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
get and delete old CRDs and RBAC
-----------------------------------------------------------------------------------
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbbackups"
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbrestores"
error: the server doesn't have a resource type "perconaservermongodbs"
+ kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbs"
clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted
clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted
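The passwords-leak check above collects every secret value in the namespace (both the base64 form and the decoded plaintext), saves each container's logs under the test's temp directory, and fails if any value shows up in any log file. A rough sketch of that idea follows; it assumes jq is available, and the function structure and file naming are illustrative rather than the suite's exact helper.

#!/bin/bash
# Sketch (illustrative): the idea behind the "check for passwords leak" step.
set -euo pipefail

namespace=demand-backup-sharded-19394
out_dir=$(mktemp -d)

# collect candidate values: raw base64 secret data plus the decoded plaintext
secrets=$(kubectl get secrets -n "$namespace" -o json \
    | jq -r '.items[].data // {} | .[]' | sort -u)
passwords="$secrets"
for s in $secrets; do
    passwords="$passwords $(echo "$s" | base64 -d)"
done

# dump every container's logs, then grep each file for any collected value
for pod in $(kubectl get pods -n "$namespace" -o 'jsonpath={.items[*].metadata.name}'); do
    for container in $(kubectl get pod "$pod" -n "$namespace" \
        -o 'jsonpath={.spec.containers[*].name}'); do
        log_file="$out_dir/logs_output-$pod-$container.txt"
        kubectl logs "$pod" -c "$container" -n "$namespace" >"$log_file"
        echo "logs saved in: $log_file"
        for p in $passwords; do
            # a hit means a credential leaked into container logs: fail loudly
            if grep -qF -- "$p" "$log_file"; then
                echo "password leak detected in $log_file" >&2
                exit 1
            fi
        done
    done
done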