++ echo 'Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1548/e2e-tests/logs/demand-backup-sharded.log'
Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1548/e2e-tests/logs/demand-backup-sharded.log
++ '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1548/e2e-tests/conf/cloud-secret.yml ']'
++ SKIP_BACKUPS_TO_AWS_GCP_AZURE=
++ oc get projects
++ kubectl get nodes
++ grep '^minikube'
+++ kubectl version -o json
+++ grep '\-eks\-'
+++ jq -r .serverVersion.gitVersion
WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1
++ '[' ']'
++ EKS=0
+++ jq -r .serverVersion.gitVersion
+++ grep gke
+++ kubectl version -o json
WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1
++ '[' v1.26.15-gke.1300000 ']'
++ GKE=1
+++ jq -r '.serverVersion.major + "." + .serverVersion.minor'
+++ /usr/bin/sed -r 's/[^0-9.]+//g'
+++ kubectl version -o json
WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1
++ KUBE_VERSION=1.26
+ set_debug
+ [[ 0 == 1 ]]
+ set +o xtrace
-----------------------------------------------------------------------------------
get and delete old CRDs and RBAC
-----------------------------------------------------------------------------------
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbbackups"
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbrestores"
error: the server doesn't have a resource type "perconaservermongodbs"
+ kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbs"
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
-----------------------------------------------------------------------------------
destroy chaos-mesh
-----------------------------------------------------------------------------------
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
cleaned up old namespaces psmdb-operator
-----------------------------------------------------------------------------------
error: resource(s) were provided, but no name was specified
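The platform-detection trace above reduces to a few kubectl/jq probes. The following is a sketch reconstructed from the xtrace output, not the test suite's actual helper functions; only the variable names EKS, GKE and KUBE_VERSION and the individual commands are taken from the trace.

# Reconstructed sketch of the environment probing seen above.
# Assumes kubectl, jq and sed are available on the Jenkins agent; the
# client/server skew WARNING lines come from `kubectl version` itself.
git_ver=$(kubectl version -o json | jq -r '.serverVersion.gitVersion')   # e.g. v1.26.15-gke.1300000
if echo "$git_ver" | grep -q -- '-eks-'; then EKS=1; else EKS=0; fi      # EKS embeds "-eks-" in gitVersion
if echo "$git_ver" | grep -q gke; then GKE=1; else GKE=0; fi             # GKE embeds "gke" in gitVersion
# Major.minor only, with any non-numeric suffix stripped (yields KUBE_VERSION=1.26 here)
KUBE_VERSION=$(kubectl version -o json \
  | jq -r '.serverVersion.major + "." + .serverVersion.minor' \
  | sed -r 's/[^0-9.]+//g')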
----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- namespace/psmdb-operator created Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1548-d2731110-1-cluster2" modified. ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created deployment.apps/percona-server-mongodb-operator created waiting for pod/percona-server-mongodb-operator-7f9f6d57f8-zr9hr to be ready.OK ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- cleaned up old namespaces demand-backup-sharded-16352 ----------------------------------------------------------------------------------- error: resource(s) were provided, but no name was specified ----------------------------------------------------------------------------------- create namespace demand-backup-sharded-16352 ----------------------------------------------------------------------------------- namespace/demand-backup-sharded-16352 created Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1548-d2731110-1-cluster2" modified. ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- Error: uninstall: Release not loaded: minio-service: release: not found "minio" has been removed from your repositories "minio" has been added to your repositories NAME: minio-service LAST DEPLOYED: Mon May 13 01:44:17 2024 NAMESPACE: demand-backup-sharded-16352 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.demand-backup-sharded-16352.svc.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace demand-backup-sharded-16352 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. 
kubectl port-forward $POD_NAME 9000 --namespace demand-backup-sharded-16352 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace demand-backup-sharded-16352 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace demand-backup-sharded-16352 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. mc ls minio-service-local waiting for pod/minio-service-57dd49b-mhr7q to be ready.OK service/minio-service created make_bucket: operator-testing pod "aws-cli" deleted If you don't see a command prompt, try pressing enter. warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: error attaching to container: container is in CONTAINER_EXITED state ----------------------------------------------------------------------------------- create first PSMDB cluster ----------------------------------------------------------------------------------- secret/some-users created deployment.apps/psmdb-client created ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created runtimeclass.node.k8s.io/container-rc unchanged perconaservermongodb.psmdb.percona.com/some-name created ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- waiting for pod/some-name-rs0-0 to be ready...........OK waiting for pod/some-name-rs0-1 to be ready......OK waiting for pod/some-name-rs0-2 to be ready......OK Waiting for cluster readyness....................................... waiting for pod/some-name-cfg-0 to be ready.OK waiting for pod/some-name-cfg-1 to be ready.OK waiting for pod/some-name-cfg-2 to be ready.OK waiting for pod/some-name-mongos-0 to be ready.OK waiting for pod/some-name-mongos-1 to be ready.OK waiting for pod/some-name-mongos-2 to be ready.OK Waiting for cluster readyness ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- secret/some-name-mongos created waiting for pod/some-name-mongos-0 to be ready.OK waiting for pod/some-name-mongos-1 to be ready.OK waiting for pod/some-name-mongos-2 to be ready.OK Waiting for cluster readyness........................................................................................ 
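The "install Minio", "create secrets for cloud storages" and "make_bucket: operator-testing" lines in the block above correspond roughly to the sketch below. This is a hedged reconstruction: the chart repo URL, the example credentials, the secret key names and the amazon/aws-cli image are assumptions for illustration; only the release name (minio-service), the namespace, the secret name (minio-secret) and the bucket name (operator-testing) come from the log itself.

# Install a standalone MinIO release named minio-service (credentials are placeholders)
helm repo add minio https://charts.min.io/
helm install minio-service minio/minio \
  --namespace demand-backup-sharded-16352 \
  --set mode=standalone \
  --set rootUser=some-access-key \
  --set rootPassword=some-secret-key
# Secret consumed by the operator's S3-compatible backup storage (key names assumed)
kubectl create secret generic minio-secret \
  -n demand-backup-sharded-16352 \
  --from-literal=AWS_ACCESS_KEY_ID=some-access-key \
  --from-literal=AWS_SECRET_ACCESS_KEY=some-secret-key
# Create the test bucket from a one-shot aws-cli pod pointed at the in-cluster endpoint;
# on success aws prints "make_bucket: operator-testing", as seen in the log
kubectl run -i --rm aws-cli --image=amazon/aws-cli --restart=Never \
  -n demand-backup-sharded-16352 \
  --env=AWS_ACCESS_KEY_ID=some-access-key --env=AWS_SECRET_ACCESS_KEY=some-secret-key \
  -- --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing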
----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("f917e676-202e-4d2b-93d2-480fd8b96691") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" }, { "db" : "myApp1", "role" : "readWrite" }, { "db" : "myApp2", "role" : "readWrite" } ] } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("efd748fc-9d2c-49bb-807a-eca8b435941e") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1715565180, 16), "signature" : { "hash" : BinData(0,"uNf8rutwxIMWwnxBaPEv5gSY/IY="), "keyId" : NumberLong("7368294619974467607") } }, "operationTime" : Timestamp(1715565180, 10) } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("ac867005-c882-4627-b689-2be7bb547487") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1715565183, 11), "signature" : { "hash" : BinData(0,"bkfkXGdFSMzZqe/DshKjLzS+GsU="), "keyId" : NumberLong("7368294619974467607") } }, "operationTime" : Timestamp(1715565183, 6) } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("c19c0ff2-fd28-4822-a119-0ce2725f739e") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1715565187, 7), "signature" : { "hash" : BinData(0,"m+cpBIBFdVPd/NuCsngI+evrncs="), "keyId" : NumberLong("7368294619974467607") } }, "operationTime" : Timestamp(1715565187, 2) } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("9604dcb3-3cc9-4c1e-9231-ad3107bbd094") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("9705a8b1-4233-4227-ac88-3d11d12c38c8") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version 
v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("458e879c-6bdf-4b4c-b854-6ac117d07da8") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye some-name-rs0-0 some-name-rs0-1 some-name-rs0-2 some-name-rs1-0 some-name-rs1-1 some-name-rs1-2 some-name-rs2-0 some-name-rs2-1 some-name-rs2-2 ----------------------------------------------------------------------------------- run backups ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- run backup backup-minio ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-minio created ----------------------------------------------------------------------------------- run backup backup-aws-s3 ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created ----------------------------------------------------------------------------------- run backup backup-gcp-cs ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs created ----------------------------------------------------------------------------------- run backup backup-azure-blob ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created backup-aws-s3...................................... backup-gcp-cs............... backup-azure-blob.................................... backup-minio. 
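Each "run backup backup-X created" line above corresponds to applying a PerconaServerMongoDBBackup object and then polling it until it completes (the trailing dots). A hedged sketch of the minio case follows; the spec field names track the public operator examples and the storage name "minio" is an assumption about what the cluster CR defines under its backup storages, so check them against the CRD version actually installed.

kubectl apply -n demand-backup-sharded-16352 -f - <<EOF
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBBackup
metadata:
  name: backup-minio
spec:
  clusterName: some-name
  storageName: minio
EOF
# Backups are asynchronous; the dotted progress above presumably comes from polling
# something like the following until the state reports ready:
kubectl get perconaservermongodbbackup backup-minio \
  -n demand-backup-sharded-16352 -o jsonpath='{.status.state}'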
----------------------------------------------------------------------------------- check backup and restore -- aws-s3 ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("06eeb5e3-9681-47ea-a094-744864ae8079") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("3232f341-637a-48c8-a802-cdde73640b48") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("78305d8d-0735-43a1-adee-4afbacc02368") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-aws-s3 created waiting psmdb-restore/backup-aws-s3 to reach ready state.............. + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kWssAQ5jhq +++ mktemp ++ local LAST_ERR=/tmp/tmp.UdOfYH2eJn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.kWssAQ5jhq ++ cat /tmp/tmp.UdOfYH2eJn ++ rm /tmp/tmp.kWssAQ5jhq /tmp/tmp.UdOfYH2eJn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KsdFqtyKd0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.jb4WbzOR97 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.KsdFqtyKd0 ++ cat /tmp/tmp.jb4WbzOR97 ++ rm /tmp/tmp.KsdFqtyKd0 /tmp/tmp.jb4WbzOR97 ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R22UE4XvTc +++ mktemp ++ local LAST_ERR=/tmp/tmp.HB8wGpUhAz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.R22UE4XvTc ++ cat /tmp/tmp.HB8wGpUhAz ++ rm /tmp/tmp.R22UE4XvTc /tmp/tmp.HB8wGpUhAz ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.si75Loe6Cq +++ mktemp ++ local LAST_ERR=/tmp/tmp.YF1wmZhvSM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.si75Loe6Cq ++ cat /tmp/tmp.YF1wmZhvSM ++ rm /tmp/tmp.si75Loe6Cq /tmp/tmp.YF1wmZhvSM ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4fVvz4OA5R +++ mktemp ++ local LAST_ERR=/tmp/tmp.OzIHyYMAZq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.4fVvz4OA5R ++ cat /tmp/tmp.OzIHyYMAZq ++ rm /tmp/tmp.4fVvz4OA5R /tmp/tmp.OzIHyYMAZq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LjRQzu2wA1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Fgeywkoa7m ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.LjRQzu2wA1 ++ cat /tmp/tmp.Fgeywkoa7m ++ rm /tmp/tmp.LjRQzu2wA1 /tmp/tmp.Fgeywkoa7m ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ER37Vbx9Kp +++ mktemp ++ local LAST_ERR=/tmp/tmp.sBYOHtlU2M ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.ER37Vbx9Kp ++ cat /tmp/tmp.sBYOHtlU2M ++ rm /tmp/tmp.ER37Vbx9Kp /tmp/tmp.sBYOHtlU2M ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G1mK4YEtYZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.2M2njiEmFY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.G1mK4YEtYZ ++ cat /tmp/tmp.2M2njiEmFY ++ rm /tmp/tmp.G1mK4YEtYZ /tmp/tmp.2M2njiEmFY ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-16352 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-16352 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.aCWc4oI63D +++ mktemp ++ local LAST_ERR=/tmp/tmp.w6Vngw5PJV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.aCWc4oI63D ++ cat /tmp/tmp.w6Vngw5PJV ++ rm /tmp/tmp.aCWc4oI63D /tmp/tmp.w6Vngw5PJV ++ return 0 + local client_container=psmdb-client-7469665986-hddcr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.3K3sytJ2cj ++ mktemp + local LAST_ERR=/tmp/tmp.Pi1tjFA6vv + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.3K3sytJ2cj + cat /tmp/tmp.Pi1tjFA6vv + rm /tmp/tmp.3K3sytJ2cj /tmp/tmp.Pi1tjFA6vv + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1548/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.QxgF0rKXHT/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-16352 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not 
match|Error saving history file:' + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-16352 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6ujINAA9zG +++ mktemp ++ local LAST_ERR=/tmp/tmp.rJppu1PzCy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.6ujINAA9zG ++ cat /tmp/tmp.rJppu1PzCy ++ rm /tmp/tmp.6ujINAA9zG /tmp/tmp.rJppu1PzCy ++ return 0 + local client_container=psmdb-client-7469665986-hddcr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.gTJ7vdH1Zi ++ mktemp + local LAST_ERR=/tmp/tmp.0OV5EpiFK5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.gTJ7vdH1Zi + cat /tmp/tmp.0OV5EpiFK5 + rm /tmp/tmp.gTJ7vdH1Zi /tmp/tmp.0OV5EpiFK5 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1548/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.QxgF0rKXHT/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-16352 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-16352 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L0SvtnRg0m +++ mktemp ++ local LAST_ERR=/tmp/tmp.wOQy4AQd3r ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.L0SvtnRg0m ++ cat /tmp/tmp.wOQy4AQd3r ++ rm /tmp/tmp.L0SvtnRg0m /tmp/tmp.wOQy4AQd3r ++ return 0 + local client_container=psmdb-client-7469665986-hddcr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.XOSCA9jRuJ ++ mktemp + local LAST_ERR=/tmp/tmp.iUzeXgaRO2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.XOSCA9jRuJ + cat /tmp/tmp.iUzeXgaRO2 + rm /tmp/tmp.XOSCA9jRuJ /tmp/tmp.iUzeXgaRO2 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1548/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.QxgF0rKXHT/find2 + desc 'check backup and restore -- gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- gcp-cs ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("3eb297a9-65e4-4534-ad62-17936541762a") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("07588293-2590-48e9-8a0b-5b4c0228cb0d") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("1a7ede18-8c19-4885-add0-328d44566f58") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-gcp-cs created waiting psmdb-restore/backup-gcp-cs to reach ready state............ + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zi5E3iHv3J +++ mktemp ++ local LAST_ERR=/tmp/tmp.heAKGgFYPz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.zi5E3iHv3J ++ cat /tmp/tmp.heAKGgFYPz ++ rm /tmp/tmp.zi5E3iHv3J /tmp/tmp.heAKGgFYPz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.41CtXZZq5N +++ mktemp ++ local LAST_ERR=/tmp/tmp.7GnwXCuFf1 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.41CtXZZq5N ++ cat /tmp/tmp.7GnwXCuFf1 ++ rm /tmp/tmp.41CtXZZq5N /tmp/tmp.7GnwXCuFf1 ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HJxWewFSen +++ mktemp ++ local LAST_ERR=/tmp/tmp.izTQv9PDGx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.HJxWewFSen ++ cat /tmp/tmp.izTQv9PDGx ++ rm /tmp/tmp.HJxWewFSen /tmp/tmp.izTQv9PDGx ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e4JbDGPVUR +++ mktemp ++ local LAST_ERR=/tmp/tmp.IjJteoj22e ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.e4JbDGPVUR ++ cat /tmp/tmp.IjJteoj22e ++ rm /tmp/tmp.e4JbDGPVUR /tmp/tmp.IjJteoj22e ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ouBTFkQk0z +++ mktemp ++ local LAST_ERR=/tmp/tmp.MGPrn0ajCo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.ouBTFkQk0z ++ cat /tmp/tmp.MGPrn0ajCo ++ rm /tmp/tmp.ouBTFkQk0z /tmp/tmp.MGPrn0ajCo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0dLydzamN5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.P5rTYQVZhY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.0dLydzamN5 ++ cat /tmp/tmp.P5rTYQVZhY ++ rm /tmp/tmp.0dLydzamN5 /tmp/tmp.P5rTYQVZhY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oJ4K0gPkt0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.YahjOKzlg0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.oJ4K0gPkt0 ++ cat /tmp/tmp.YahjOKzlg0 ++ rm /tmp/tmp.oJ4K0gPkt0 /tmp/tmp.YahjOKzlg0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tfqbWeSDVf +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y4JTbnlLoj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.tfqbWeSDVf ++ cat /tmp/tmp.Y4JTbnlLoj ++ rm /tmp/tmp.tfqbWeSDVf /tmp/tmp.Y4JTbnlLoj ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-16352 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-16352 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XBQzs5Xv8u +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y1ANy3MVLe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.XBQzs5Xv8u ++ cat /tmp/tmp.Y1ANy3MVLe ++ rm /tmp/tmp.XBQzs5Xv8u /tmp/tmp.Y1ANy3MVLe ++ return 0 + local client_container=psmdb-client-7469665986-hddcr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.zDHM7aPGIZ ++ mktemp + local LAST_ERR=/tmp/tmp.NiY955ij3Y + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.zDHM7aPGIZ + cat /tmp/tmp.NiY955ij3Y + rm /tmp/tmp.zDHM7aPGIZ /tmp/tmp.NiY955ij3Y + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1548/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.QxgF0rKXHT/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-16352 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-16352 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local 
uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EDsWTNvc98 +++ mktemp ++ local LAST_ERR=/tmp/tmp.aKDCVf2l60 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.EDsWTNvc98 ++ cat /tmp/tmp.aKDCVf2l60 ++ rm /tmp/tmp.EDsWTNvc98 /tmp/tmp.aKDCVf2l60 ++ return 0 + local client_container=psmdb-client-7469665986-hddcr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.zAgv9LrlLN ++ mktemp + local LAST_ERR=/tmp/tmp.WUT9oruZsS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.zAgv9LrlLN + cat /tmp/tmp.WUT9oruZsS + rm /tmp/tmp.zAgv9LrlLN /tmp/tmp.WUT9oruZsS + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1548/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.QxgF0rKXHT/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-16352 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-16352 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vkR2cmCHDr +++ mktemp ++ local LAST_ERR=/tmp/tmp.OLO4mipSKR ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.vkR2cmCHDr ++ cat /tmp/tmp.OLO4mipSKR ++ rm /tmp/tmp.vkR2cmCHDr /tmp/tmp.OLO4mipSKR ++ return 0 + local client_container=psmdb-client-7469665986-hddcr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.SU8uflkQgB ++ mktemp + local LAST_ERR=/tmp/tmp.VChtXVcPoP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.SU8uflkQgB + cat /tmp/tmp.VChtXVcPoP + rm /tmp/tmp.SU8uflkQgB /tmp/tmp.VChtXVcPoP + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1548/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.QxgF0rKXHT/find2 + desc 'check backup and restore -- azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- azure-blob ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("34f802d3-d9f9-404a-8d49-667a86bb8bd7") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("9de933e0-8c6e-4ff1-9a25-b2b97627ca4f") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("e26d1f4f-af58-4090-b38e-70350661c1c5") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-azure-blob created waiting psmdb-restore/backup-azure-blob to reach ready state............ + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UpNo9GMZno +++ mktemp ++ local LAST_ERR=/tmp/tmp.2EHjqiwpPj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.UpNo9GMZno ++ cat /tmp/tmp.2EHjqiwpPj ++ rm /tmp/tmp.UpNo9GMZno /tmp/tmp.2EHjqiwpPj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oFBftLY3bn +++ mktemp ++ local LAST_ERR=/tmp/tmp.cf0OBvkxeN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.oFBftLY3bn ++ cat /tmp/tmp.cf0OBvkxeN ++ rm /tmp/tmp.oFBftLY3bn /tmp/tmp.cf0OBvkxeN ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dlBweSxmBr +++ mktemp ++ local LAST_ERR=/tmp/tmp.alJrlPDNIx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.dlBweSxmBr ++ cat /tmp/tmp.alJrlPDNIx ++ rm /tmp/tmp.dlBweSxmBr /tmp/tmp.alJrlPDNIx ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DOXPzhLmA0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.YJegCyRE7C ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.DOXPzhLmA0 ++ cat /tmp/tmp.YJegCyRE7C ++ rm /tmp/tmp.DOXPzhLmA0 /tmp/tmp.YJegCyRE7C ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.igKaURSruo +++ mktemp ++ local LAST_ERR=/tmp/tmp.BRiQvE7dmX ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.igKaURSruo ++ cat /tmp/tmp.BRiQvE7dmX ++ rm /tmp/tmp.igKaURSruo /tmp/tmp.BRiQvE7dmX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I6SNl5nNlB +++ mktemp ++ local LAST_ERR=/tmp/tmp.isAsN9S6xB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.I6SNl5nNlB ++ cat /tmp/tmp.isAsN9S6xB ++ rm /tmp/tmp.I6SNl5nNlB /tmp/tmp.isAsN9S6xB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nd2RWIwVOU +++ mktemp ++ local LAST_ERR=/tmp/tmp.rZDhIYObGj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.nd2RWIwVOU ++ cat /tmp/tmp.rZDhIYObGj ++ rm /tmp/tmp.nd2RWIwVOU /tmp/tmp.rZDhIYObGj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zd9hP3OcaC +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ntlv2TTupa ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.zd9hP3OcaC ++ cat /tmp/tmp.Ntlv2TTupa ++ rm /tmp/tmp.zd9hP3OcaC /tmp/tmp.Ntlv2TTupa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YJnwlyrjeJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.wJ21AVyIPk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.YJnwlyrjeJ ++ cat /tmp/tmp.wJ21AVyIPk ++ rm /tmp/tmp.YJnwlyrjeJ /tmp/tmp.wJ21AVyIPk ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-16352 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-16352 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4mWsL1TVAY +++ mktemp ++ local LAST_ERR=/tmp/tmp.zwdLdPjqTi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.4mWsL1TVAY ++ cat /tmp/tmp.zwdLdPjqTi ++ rm /tmp/tmp.4mWsL1TVAY /tmp/tmp.zwdLdPjqTi ++ return 0 + local client_container=psmdb-client-7469665986-hddcr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.oPvAGG8ELv ++ mktemp + local LAST_ERR=/tmp/tmp.XqNriJPBwQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.oPvAGG8ELv + cat /tmp/tmp.XqNriJPBwQ + rm /tmp/tmp.oPvAGG8ELv /tmp/tmp.XqNriJPBwQ + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1548/e2e-tests/demand-backup-sharded/compare/find.json 
/tmp/tmp.QxgF0rKXHT/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-16352 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-16352 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Gs3YJQRVbu +++ mktemp ++ local LAST_ERR=/tmp/tmp.A3d8QxiJ0p ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.Gs3YJQRVbu ++ cat /tmp/tmp.A3d8QxiJ0p ++ rm /tmp/tmp.Gs3YJQRVbu /tmp/tmp.A3d8QxiJ0p ++ return 0 + local client_container=psmdb-client-7469665986-hddcr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.BsCMoQLca9 ++ mktemp + local LAST_ERR=/tmp/tmp.G3OQ1hNufy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.BsCMoQLca9 + cat /tmp/tmp.G3OQ1hNufy + rm /tmp/tmp.BsCMoQLca9 /tmp/tmp.G3OQ1hNufy + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1548/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.QxgF0rKXHT/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-16352 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-16352 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local driver=mongodb + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4nihNFNpvv +++ mktemp ++ local LAST_ERR=/tmp/tmp.RMXtG9vOFJ ++ local 
exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.4nihNFNpvv ++ cat /tmp/tmp.RMXtG9vOFJ ++ rm /tmp/tmp.4nihNFNpvv /tmp/tmp.RMXtG9vOFJ ++ return 0 + local client_container=psmdb-client-7469665986-hddcr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.uqNhra9EAq ++ mktemp + local LAST_ERR=/tmp/tmp.manOR5wFAo + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.uqNhra9EAq + cat /tmp/tmp.manOR5wFAo + rm /tmp/tmp.uqNhra9EAq /tmp/tmp.manOR5wFAo + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1548/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.QxgF0rKXHT/find2 + desc 'check backup and restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- minio ----------------------------------------------------------------------------------- 2024-05-13 01:54:02 55 myApp.test.gz Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("c6eb09de-ceaf-4e87-8727-b34459277565") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("cc0f4e4d-3253-414e-bd74-c797fa445a0d") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-16352.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("8988bc15-4b37-4b17-84ae-9aac7eeb39bd") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio created waiting psmdb-restore/backup-minio to reach ready state........... 
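The restore step and the wait_cluster_consistency loop whose xtrace output fills the surrounding lines reduce to the sketch below. The jsonpath query, the 32-retry limit and the 10-second sleep are taken from the trace itself; the Restore CR fields follow the public operator examples and may differ from the suite's own templates.

kubectl apply -n demand-backup-sharded-16352 -f - <<EOF
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-minio
spec:
  clusterName: some-name
  backupName: backup-minio
EOF
# Once the restore reaches ready, the suite waits for the cluster object to report
# ready again (the "waiting for cluster readyness" dots), roughly equivalent to:
retry=0
until [ "$(kubectl get psmdb some-name -o jsonpath='{.status.state}')" = "ready" ]; do
  retry=$((retry + 1))
  [ "$retry" -ge 32 ] && { echo "cluster did not become ready in time" >&2; exit 1; }
  echo -n .
  sleep 10
done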
+ '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aDeu4yHLvg +++ mktemp ++ local LAST_ERR=/tmp/tmp.jqiHDMIEQb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.aDeu4yHLvg ++ cat /tmp/tmp.jqiHDMIEQb ++ rm /tmp/tmp.aDeu4yHLvg /tmp/tmp.jqiHDMIEQb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vE0pGm3FPG +++ mktemp ++ local LAST_ERR=/tmp/tmp.8Y1rv33bre ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.vE0pGm3FPG ++ cat /tmp/tmp.8Y1rv33bre ++ rm /tmp/tmp.vE0pGm3FPG /tmp/tmp.8Y1rv33bre ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CBGqbRPbeN +++ mktemp ++ local LAST_ERR=/tmp/tmp.hmjDP5UZgH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.CBGqbRPbeN ++ cat /tmp/tmp.hmjDP5UZgH ++ rm /tmp/tmp.CBGqbRPbeN /tmp/tmp.hmjDP5UZgH ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ItKlhRt0yq +++ mktemp ++ local LAST_ERR=/tmp/tmp.4gjcogB7Kd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.ItKlhRt0yq ++ cat /tmp/tmp.4gjcogB7Kd ++ rm /tmp/tmp.ItKlhRt0yq /tmp/tmp.4gjcogB7Kd ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KWGWcRCxOX +++ mktemp ++ local LAST_ERR=/tmp/tmp.b61WAtZyuC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.KWGWcRCxOX ++ cat /tmp/tmp.b61WAtZyuC ++ rm /tmp/tmp.KWGWcRCxOX /tmp/tmp.b61WAtZyuC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . 
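The block above and the retries that follow are a single readiness poll: wait_cluster_consistency reads .status.state from the some-name psmdb object every 10 seconds and simply retries through the transient "initializing" and "error" states seen in the trace until the state becomes "ready" or 32 attempts are used up. A sketch of that loop as reconstructed from the trace; the timeout handling is an assumption, since the log only shows the success path:

# Sketch of the readiness poll expanded in the trace; the failure branch is assumed.
wait_cluster_consistency() {
    local cluster_name="$1"
    local wait_time=32
    local retry=0
    sleep 7
    echo -n 'waiting for cluster readiness'
    until [[ $(kubectl get psmdb "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]]; do
        let retry+=1
        if [ "$retry" -ge "$wait_time" ]; then
            echo " cluster $cluster_name did not reach ready state in time"
            return 1
        fi
        echo -n .
        sleep 10
    done
    echo
}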
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AS1iqtcCO6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.dfls5daWVo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.AS1iqtcCO6 ++ cat /tmp/tmp.dfls5daWVo ++ rm /tmp/tmp.AS1iqtcCO6 /tmp/tmp.dfls5daWVo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cK7UDUxjLl +++ mktemp ++ local LAST_ERR=/tmp/tmp.7ouUjJGz85 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.cK7UDUxjLl ++ cat /tmp/tmp.7ouUjJGz85 ++ rm /tmp/tmp.cK7UDUxjLl /tmp/tmp.7ouUjJGz85 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3fJ30N7cFT +++ mktemp ++ local LAST_ERR=/tmp/tmp.YypKrj5CMh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.3fJ30N7cFT ++ cat /tmp/tmp.YypKrj5CMh ++ rm /tmp/tmp.3fJ30N7cFT /tmp/tmp.YypKrj5CMh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1l5iCF67EQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.1g7TZCFQjn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.1l5iCF67EQ ++ cat /tmp/tmp.1g7TZCFQjn ++ rm /tmp/tmp.1l5iCF67EQ /tmp/tmp.1g7TZCFQjn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 9 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I3X5jw6HVH +++ mktemp ++ local LAST_ERR=/tmp/tmp.UVGrOgbpYx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.I3X5jw6HVH ++ cat /tmp/tmp.UVGrOgbpYx ++ rm /tmp/tmp.I3X5jw6HVH /tmp/tmp.UVGrOgbpYx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 10 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X86S8IxcR5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.O5njuSt6TP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.X86S8IxcR5 ++ cat /tmp/tmp.O5njuSt6TP ++ rm /tmp/tmp.X86S8IxcR5 /tmp/tmp.O5njuSt6TP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 11 -ge 32 ']' + echo -n . 
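Each of those status reads, like nearly every kubectl call in this log, is expanded from the suite's kubectl_bin wrapper: stdout and stderr are captured into mktemp files, the command is attempted up to three times with a short pause after a failure, and the captured output is printed and cleaned up afterwards. A sketch reconstructed from the trace; the exact retry and error handling in the real helper may differ:

# Sketch of the retry wrapper whose expansion dominates this trace.
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 timeout=4 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" -ne 0 ]; then
            sleep "$timeout"        # transient failure: wait and try again
            continue
        fi
        break
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm -f "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}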
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3dRwl53TlF +++ mktemp ++ local LAST_ERR=/tmp/tmp.VK3skISE4e ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.3dRwl53TlF ++ cat /tmp/tmp.VK3skISE4e ++ rm /tmp/tmp.3dRwl53TlF /tmp/tmp.VK3skISE4e ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-16352 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-16352 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LIRiJtJtIP +++ mktemp ++ local LAST_ERR=/tmp/tmp.GmD8H5r6XW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.LIRiJtJtIP ++ cat /tmp/tmp.GmD8H5r6XW ++ rm /tmp/tmp.LIRiJtJtIP /tmp/tmp.GmD8H5r6XW ++ return 0 + local client_container=psmdb-client-7469665986-hddcr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.kUpp9q60Rx ++ mktemp + local LAST_ERR=/tmp/tmp.jxJp2uCe2z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.kUpp9q60Rx + cat /tmp/tmp.jxJp2uCe2z + rm /tmp/tmp.kUpp9q60Rx /tmp/tmp.jxJp2uCe2z + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1548/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.QxgF0rKXHT/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-16352 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-16352 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona 
Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FVSF6yKMbv +++ mktemp ++ local LAST_ERR=/tmp/tmp.oCuwe2ERFV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.FVSF6yKMbv ++ cat /tmp/tmp.oCuwe2ERFV ++ rm /tmp/tmp.FVSF6yKMbv /tmp/tmp.oCuwe2ERFV ++ return 0 + local client_container=psmdb-client-7469665986-hddcr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.9GH8cl5kIE ++ mktemp + local LAST_ERR=/tmp/tmp.uD0fil2IKx + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.9GH8cl5kIE + cat /tmp/tmp.uD0fil2IKx + rm /tmp/tmp.9GH8cl5kIE /tmp/tmp.uD0fil2IKx + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1548/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.QxgF0rKXHT/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-16352 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-16352 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-16352 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k3w3Ief8dm +++ mktemp ++ local LAST_ERR=/tmp/tmp.gib8VwnTY2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.k3w3Ief8dm ++ cat /tmp/tmp.gib8VwnTY2 ++ rm /tmp/tmp.k3w3Ief8dm /tmp/tmp.gib8VwnTY2 ++ return 0 + local client_container=psmdb-client-7469665986-hddcr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.1QGr5N6utG ++ mktemp + local LAST_ERR=/tmp/tmp.3jGHfoBC6z + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-hddcr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-16352.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.1QGr5N6utG + cat /tmp/tmp.3jGHfoBC6z + rm /tmp/tmp.1QGr5N6utG /tmp/tmp.3jGHfoBC6z + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1548/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.QxgF0rKXHT/find2 + desc 'delete backup and check if it is removed from bucket -- minio' + set +o xtrace ----------------------------------------------------------------------------------- delete backup and check if it is removed from bucket -- minio ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs" deleted perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted ----------------------------------------------------------------------------------- check for passwords leak ----------------------------------------------------------------------------------- secrets=YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 passwords=backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 pods=minio-service-57dd49b-mhr7q psmdb-client-7469665986-hddcr some-name-cfg-0 some-name-cfg-1 some-name-cfg-2 some-name-mongos-0 some-name-mongos-1 some-name-mongos-2 some-name-rs0-0 some-name-rs0-1 some-name-rs0-2 some-name-rs1-0 some-name-rs1-1 some-name-rs1-2 some-name-rs2-0 some-name-rs2-1 some-name-rs2-2 logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-minio-service-57dd49b-mhr7q-minio.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-psmdb-client-7469665986-hddcr-psmdb-client.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-cfg-0-mongod.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-cfg-0-cfg-sidecar-1.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-cfg-0-backup-agent.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-cfg-1-mongod.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-cfg-1-cfg-sidecar-1.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-cfg-1-backup-agent.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-cfg-2-mongod.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-cfg-2-cfg-sidecar-1.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-cfg-2-backup-agent.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-mongos-0-mongos.txt logs saved in: 
/tmp/tmp.QxgF0rKXHT/logs_output-some-name-mongos-0-mongos-sidecar-1.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-mongos-1-mongos.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-mongos-1-mongos-sidecar-1.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-mongos-2-mongos.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-mongos-2-mongos-sidecar-1.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs0-0-mongod.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs0-0-backup-agent.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs0-1-mongod.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs0-1-backup-agent.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs0-2-mongod.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs0-2-backup-agent.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs1-0-mongod.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs1-0-backup-agent.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs1-1-mongod.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs1-1-backup-agent.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs1-2-mongod.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs1-2-backup-agent.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs2-0-mongod.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs2-0-rs-sidecar-1.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs2-0-backup-agent.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs2-1-mongod.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs2-1-rs-sidecar-1.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs2-1-backup-agent.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs2-2-mongod.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs2-2-rs-sidecar-1.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-some-name-rs2-2-backup-agent.txt logs saved in: /tmp/tmp.QxgF0rKXHT/logs_output-percona-server-mongodb-operator-7f9f6d57f8-zr9hr-percona-server-mongodb-operator.txt runtimeclass.node.k8s.io "container-rc" deleted ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" error: the server doesn't have a resource type "perconaservermongodbs" + kubectl 
patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted
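The teardown ends the way the run began: the suite removes the operator's CRDs and RBAC, first stripping finalizers from any leftover custom resources so that CRD deletion cannot hang on objects the already-removed operator would otherwise have to finalize. The "doesn't have a resource type" errors above are the expected no-op case once the CRDs are gone. The pattern, condensed from the commands visible in the trace; looping over individual object names is an assumption, since the log calls patch on the resource type directly:

# Condensed cleanup sketch; the per-object loop is an assumption made for
# correctness, since kubectl patch needs a concrete object to act on.
for crd in perconaservermongodbbackups perconaservermongodbrestores perconaservermongodbs; do
    # Clear finalizers on any leftover objects so deletion cannot block.
    for obj in $(kubectl get "${crd}.psmdb.percona.com" -o name 2>/dev/null); do
        kubectl patch "$obj" --type=merge -p '{"metadata":{"finalizers":[]}}'
    done
    # Drop the CRD itself; --ignore-not-found keeps reruns quiet.
    kubectl delete crd "${crd}.psmdb.percona.com" --ignore-not-found
done
kubectl delete clusterrole percona-server-mongodb-operator --ignore-not-found
kubectl delete clusterrolebinding service-account-percona-server-mongodb-operator --ignore-not-found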