++ echo 'Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/logs/demand-backup-sharded.log' Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/logs/demand-backup-sharded.log ++ '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/conf/cloud-secret.yml ']' ++ SKIP_BACKUPS_TO_AWS_GCP_AZURE= ++ oc get projects ++ kubectl get nodes ++ grep '^minikube' +++ kubectl version -o json +++ jq -r .serverVersion.gitVersion +++ grep '\-eks\-' WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 ++ '[' ']' ++ EKS=0 +++ kubectl version -o json +++ jq -r .serverVersion.gitVersion +++ grep gke WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 ++ '[' v1.26.15-gke.1243000 ']' ++ GKE=1 +++ /usr/bin/sed -r 's/[^0-9.]+//g' +++ jq -r '.serverVersion.major + "." + .serverVersion.minor' +++ kubectl version -o json WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 ++ KUBE_VERSION=1.26 + set_debug + [[ 0 == 1 ]] + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- error: resource(s) were provided, but no name was specified 
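The block above clears finalizers from any leftover psmdb custom resources before the old CRDs are removed; on a fresh cluster the resource types simply do not exist yet, which is where the "doesn't have a resource type" errors come from. A minimal sketch of that cleanup pattern, using the CRD names from the log (the loop itself is an illustrative reconstruction, not the suite's exact function):

for crd in perconaservermongodbbackups.psmdb.percona.com \
           perconaservermongodbrestores.psmdb.percona.com \
           perconaservermongodbs.psmdb.percona.com; do
    # drop finalizers in the current namespace so stuck objects cannot block deletion
    for obj in $(kubectl get "$crd" -o name 2>/dev/null); do
        kubectl patch "$obj" --type=merge -p '{"metadata":{"finalizers":[]}}'
    done
    kubectl delete crd "$crd" --ignore-not-found
done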
----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- namespace/psmdb-operator created Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1542-e268e038-1-cluster2" modified. ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created deployment.apps/percona-server-mongodb-operator created waiting for pod/percona-server-mongodb-operator-fd9c6c884-sp8x6 to be ready.OK ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- cleaned up old namespaces demand-backup-sharded-5449 ----------------------------------------------------------------------------------- error: resource(s) were provided, but no name was specified ----------------------------------------------------------------------------------- create namespace demand-backup-sharded-5449 ----------------------------------------------------------------------------------- namespace/demand-backup-sharded-5449 created Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1542-e268e038-1-cluster2" modified. ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- Error: uninstall: Release not loaded: minio-service: release: not found "minio" has been removed from your repositories "minio" has been added to your repositories NAME: minio-service LAST DEPLOYED: Mon May 6 08:52:23 2024 NAMESPACE: demand-backup-sharded-5449 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.demand-backup-sharded-5449.svc.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace demand-backup-sharded-5449 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. 
kubectl port-forward $POD_NAME 9000 --namespace demand-backup-sharded-5449 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace demand-backup-sharded-5449 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace demand-backup-sharded-5449 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. mc ls minio-service-local waiting for pod/minio-service-57dd49b-pbrpn to be ready.OK service/minio-service created make_bucket: operator-testing pod "aws-cli" deleted If you don't see a command prompt, try pressing enter. warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: error attaching to container: container is in CONTAINER_EXITED state ----------------------------------------------------------------------------------- create first PSMDB cluster ----------------------------------------------------------------------------------- secret/some-users created deployment.apps/psmdb-client created ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created runtimeclass.node.k8s.io/container-rc unchanged perconaservermongodb.psmdb.percona.com/some-name created ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- waiting for pod/some-name-rs0-0 to be ready.........OK waiting for pod/some-name-rs0-1 to be ready...........OK waiting for pod/some-name-rs0-2 to be ready.....OK Waiting for cluster readyness............................................ waiting for pod/some-name-cfg-0 to be ready.OK waiting for pod/some-name-cfg-1 to be ready.OK waiting for pod/some-name-cfg-2 to be ready.OK waiting for pod/some-name-mongos-0 to be ready.OK waiting for pod/some-name-mongos-1 to be ready.OK waiting for pod/some-name-mongos-2 to be ready.OK Waiting for cluster readyness ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- secret/some-name-mongos created waiting for pod/some-name-mongos-0 to be ready.OK waiting for pod/some-name-mongos-1 to be ready.OK waiting for pod/some-name-mongos-2 to be ready.OK Waiting for cluster readyness.................................................................................................... 
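The MinIO service used as the local backup storage is installed with Helm into the test namespace, and the operator-testing bucket is then created from a throwaway aws-cli pod pointed at the in-cluster endpoint. A hedged reconstruction of both steps; the chart repository URL, chart values, and credentials are assumptions, only the release name, namespace, bucket name, and port come from the log:

helm repo add minio https://charts.min.io/
helm repo update
# a single-node MinIO is enough for an e2e backup target
helm install minio-service minio/minio \
    --namespace demand-backup-sharded-5449 \
    --set mode=standalone \
    --set replicas=1 \
    --set rootUser=some-access-key \
    --set rootPassword=some-secret-key

# create the bucket the backups will be written to
kubectl run aws-cli --rm -i --restart=Never \
    --namespace demand-backup-sharded-5449 \
    --image=amazon/aws-cli \
    --env=AWS_ACCESS_KEY_ID=some-access-key \
    --env=AWS_SECRET_ACCESS_KEY=some-secret-key \
    -- --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing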
----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("44912e13-3a6e-447c-9e57-f3b5ba3242d2") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" }, { "db" : "myApp1", "role" : "readWrite" }, { "db" : "myApp2", "role" : "readWrite" } ] } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("3b23ba8a-bd5a-444c-b555-bbd4488128ad") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1714986089, 21), "signature" : { "hash" : BinData(0,"Pu8Ud9V4KWHdpmkwJGwnFQ/NacE="), "keyId" : NumberLong("7365807400118386711") } }, "operationTime" : Timestamp(1714986089, 15) } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("f0c4542f-e83e-471c-b17b-3b205006d48c") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1714986093, 9), "signature" : { "hash" : BinData(0,"1g+3HdZC9V6OaNXffPhbVX5D1JQ="), "keyId" : NumberLong("7365807400118386711") } }, "operationTime" : Timestamp(1714986093, 4) } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("e2efcc99-292d-43b9-9ca9-4e254e7b5df1") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1714986096, 12), "signature" : { "hash" : BinData(0,"mvMmN7ubVOKTrG67apPmuK/9QOA="), "keyId" : NumberLong("7365807400118386711") } }, "operationTime" : Timestamp(1714986096, 7) } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("1fe36a60-6743-46c4-9624-3d4f36af96bc") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("d3cec9e8-7775-4ffd-bad9-2c7e452e47ce") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 
connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("c6a9c73a-e0af-42f9-a93a-44eec61cb1d6") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye some-name-rs0-0 some-name-rs0-1 some-name-rs0-2 some-name-rs1-0 some-name-rs1-1 some-name-rs1-2 some-name-rs2-0 some-name-rs2-1 some-name-rs2-2 ----------------------------------------------------------------------------------- run backups ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- run backup backup-minio ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-minio created ----------------------------------------------------------------------------------- run backup backup-aws-s3 ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created ----------------------------------------------------------------------------------- run backup backup-gcp-cs ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs created ----------------------------------------------------------------------------------- run backup backup-azure-blob ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created backup-aws-s3..................................................................... backup-gcp-cs................... backup-azure-blob...................... backup-minio. 
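Each of the four on-demand backups above is driven by a PerconaServerMongoDBBackup object naming the cluster and one of the storages configured in its spec. A minimal sketch for the MinIO case; the field names follow my understanding of the psmdb.percona.com/v1 API and should be treated as an assumption rather than the manifest the test actually applies:

cat <<EOF | kubectl apply -f -
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBBackup
metadata:
  name: backup-minio
spec:
  clusterName: some-name   # assumed field name; older operator releases used psmdbCluster
  storageName: minio       # must match a backup storage defined in the PerconaServerMongoDB resource
EOF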
----------------------------------------------------------------------------------- check backup and restore -- aws-s3 ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("ae695557-a1ee-4ffa-b2b2-c69bf8971456") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("6964507c-d9ac-4401-a79c-650f413f45df") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("e672862a-76d2-46a2-9184-a7b2b4c41f25") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-aws-s3 created waiting psmdb-restore/backup-aws-s3 to reach ready state............. + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JsnZXvu5wc +++ mktemp ++ local LAST_ERR=/tmp/tmp.mFwhYKQaIP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.JsnZXvu5wc ++ cat /tmp/tmp.mFwhYKQaIP ++ rm /tmp/tmp.JsnZXvu5wc /tmp/tmp.mFwhYKQaIP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BoJyMy2J3U +++ mktemp ++ local LAST_ERR=/tmp/tmp.P4lqOXG4AP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.BoJyMy2J3U ++ cat /tmp/tmp.P4lqOXG4AP ++ rm /tmp/tmp.BoJyMy2J3U /tmp/tmp.P4lqOXG4AP ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . 
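The restore-backup-aws-s3 object created above is the second half of the pair: a PerconaServerMongoDBRestore that points back at the finished backup. Again a hedged sketch, with the field names assumed from the same v1 API:

cat <<EOF | kubectl apply -f -
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-aws-s3
spec:
  clusterName: some-name      # assumed field name, as with the backup object
  backupName: backup-aws-s3   # the PerconaServerMongoDBBackup that reached the ready state
EOF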
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oG5Nvf9cen +++ mktemp ++ local LAST_ERR=/tmp/tmp.sSET3WEGUW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.oG5Nvf9cen ++ cat /tmp/tmp.sSET3WEGUW ++ rm /tmp/tmp.oG5Nvf9cen /tmp/tmp.sSET3WEGUW ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PVfWgkd575 +++ mktemp ++ local LAST_ERR=/tmp/tmp.CC3IHzd6CS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.PVfWgkd575 ++ cat /tmp/tmp.CC3IHzd6CS ++ rm /tmp/tmp.PVfWgkd575 /tmp/tmp.CC3IHzd6CS ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zP2efElfSk +++ mktemp ++ local LAST_ERR=/tmp/tmp.UoRsZYUwrO ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.zP2efElfSk ++ cat /tmp/tmp.UoRsZYUwrO ++ rm /tmp/tmp.zP2efElfSk /tmp/tmp.UoRsZYUwrO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lBVQGaJycN +++ mktemp ++ local LAST_ERR=/tmp/tmp.ageqLBxnRf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.lBVQGaJycN ++ cat /tmp/tmp.ageqLBxnRf ++ rm /tmp/tmp.lBVQGaJycN /tmp/tmp.ageqLBxnRf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eZvrdSkvcd +++ mktemp ++ local LAST_ERR=/tmp/tmp.2XKQGabQIn ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.eZvrdSkvcd ++ cat /tmp/tmp.2XKQGabQIn ++ rm /tmp/tmp.eZvrdSkvcd /tmp/tmp.2XKQGabQIn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
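The long xtrace blocks before and after this point are one and the same polling loop: the test keeps reading .status.state of the psmdb object and tolerates the error and initializing states the cluster passes through right after a restore. Stripped of the mktemp/LAST_OUT plumbing it boils down to roughly the following (a simplification, not the literal wait_cluster_consistency function):

wait_cluster_ready() {
    local cluster="$1" retry=0 max_retries=32
    sleep 7
    echo -n "waiting for cluster readiness"
    # the state cycles through initializing/error before settling on ready
    until [ "$(kubectl get psmdb "$cluster" -o jsonpath='{.status.state}')" = "ready" ]; do
        retry=$((retry + 1))
        if [ "$retry" -ge "$max_retries" ]; then
            echo "cluster $cluster did not become ready in time" >&2
            return 1
        fi
        echo -n .
        sleep 10
    done
}

wait_cluster_ready some-name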
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jAPIQa8cbT +++ mktemp ++ local LAST_ERR=/tmp/tmp.2r4k3zeCLE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.jAPIQa8cbT ++ cat /tmp/tmp.2r4k3zeCLE ++ rm /tmp/tmp.jAPIQa8cbT /tmp/tmp.2r4k3zeCLE ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-5449 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-5449 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UgzvTxPwtx +++ mktemp ++ local LAST_ERR=/tmp/tmp.BQT437z8NS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.UgzvTxPwtx ++ cat /tmp/tmp.BQT437z8NS ++ rm /tmp/tmp.UgzvTxPwtx /tmp/tmp.BQT437z8NS ++ return 0 + local client_container=psmdb-client-5dc94d5b48-vh8lp + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.TcDSclSUsk ++ mktemp + local LAST_ERR=/tmp/tmp.DHD5Elg1GV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.TcDSclSUsk + cat /tmp/tmp.DHD5Elg1GV + rm /tmp/tmp.TcDSclSUsk /tmp/tmp.DHD5Elg1GV + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.MJN1C1m0Iw/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-5449 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; 
s/-[0-9]+.svc/-xxx.svc/' + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-5449 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3PjosnSCF7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.r0GfZuKnTf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.3PjosnSCF7 ++ cat /tmp/tmp.r0GfZuKnTf ++ rm /tmp/tmp.3PjosnSCF7 /tmp/tmp.r0GfZuKnTf ++ return 0 + local client_container=psmdb-client-5dc94d5b48-vh8lp + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.gzI0iqgUVo ++ mktemp + local LAST_ERR=/tmp/tmp.KVe3l8jPQ0 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.gzI0iqgUVo + cat /tmp/tmp.KVe3l8jPQ0 + rm /tmp/tmp.gzI0iqgUVo /tmp/tmp.KVe3l8jPQ0 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.MJN1C1m0Iw/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-5449 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-5449 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rtp0967YXz +++ mktemp ++ local LAST_ERR=/tmp/tmp.4rPEr1FCJe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.rtp0967YXz ++ cat /tmp/tmp.4rPEr1FCJe ++ rm /tmp/tmp.rtp0967YXz /tmp/tmp.4rPEr1FCJe ++ return 0 + local client_container=psmdb-client-5dc94d5b48-vh8lp + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.6T0Bxtu06P ++ mktemp + local LAST_ERR=/tmp/tmp.MtjqbXyjXS + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.6T0Bxtu06P + cat /tmp/tmp.MtjqbXyjXS + rm /tmp/tmp.6T0Bxtu06P /tmp/tmp.MtjqbXyjXS + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.MJN1C1m0Iw/find2 + desc 'check backup and restore -- gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- gcp-cs ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("b13b51bc-cd63-4adf-ae6a-a8e1c1f47b63") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("2791647c-0eb2-4180-83c6-8e056de770c6") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("6709d9d1-ed4f-4c0c-a693-0df060332ed3") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-gcp-cs created waiting psmdb-restore/backup-gcp-cs to reach ready state............. + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JZyRa2woX1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.759EYendq6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.JZyRa2woX1 ++ cat /tmp/tmp.759EYendq6 ++ rm /tmp/tmp.JZyRa2woX1 /tmp/tmp.759EYendq6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
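The check_data / compare_mongos_cmd traces interleaved above follow the same recipe for each of the three databases: run db.test.find() through the psmdb-client pod against the mongos service, strip connection noise and volatile fields, then diff the result against a golden file. A condensed sketch of that verification (golden-file paths and credentials are the ones shown in the log, everything else is simplified):

client=$(kubectl get pods --selector=name=psmdb-client -o jsonpath='{.items[].metadata.name}')

check_db() {
    local db="$1" expected="$2"
    kubectl exec "$client" -- bash -c \
        "printf 'use $db\n db.test.find()\n' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin" \
        | egrep -v 'I NETWORK|W NETWORK|Percona Server for MongoDB|connecting to:|Implicit session:|versions do not match' \
        | sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' \
        > "/tmp/$db.find"
    diff "$expected" "/tmp/$db.find"
}

check_db myApp  /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/demand-backup-sharded/compare/find.json
check_db myApp1 /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/demand-backup-sharded/compare/find1.json
check_db myApp2 /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/demand-backup-sharded/compare/find2.json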
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xOsXgbD9hW +++ mktemp ++ local LAST_ERR=/tmp/tmp.Lu54DCukHj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.xOsXgbD9hW ++ cat /tmp/tmp.Lu54DCukHj ++ rm /tmp/tmp.xOsXgbD9hW /tmp/tmp.Lu54DCukHj ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QRNQerRcEI +++ mktemp ++ local LAST_ERR=/tmp/tmp.OUwzQmIe6w ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.QRNQerRcEI ++ cat /tmp/tmp.OUwzQmIe6w ++ rm /tmp/tmp.QRNQerRcEI /tmp/tmp.OUwzQmIe6w ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5EdLn119OK +++ mktemp ++ local LAST_ERR=/tmp/tmp.qG1FdDqkPd ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.5EdLn119OK ++ cat /tmp/tmp.qG1FdDqkPd ++ rm /tmp/tmp.5EdLn119OK /tmp/tmp.qG1FdDqkPd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oNFBw4DwvA +++ mktemp ++ local LAST_ERR=/tmp/tmp.1iITtCn1PN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.oNFBw4DwvA ++ cat /tmp/tmp.1iITtCn1PN ++ rm /tmp/tmp.oNFBw4DwvA /tmp/tmp.1iITtCn1PN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jcvq476yZy +++ mktemp ++ local LAST_ERR=/tmp/tmp.7GgjGyPLO0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.jcvq476yZy ++ cat /tmp/tmp.7GgjGyPLO0 ++ rm /tmp/tmp.jcvq476yZy /tmp/tmp.7GgjGyPLO0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.a99LEchbZF +++ mktemp ++ local LAST_ERR=/tmp/tmp.cAZLljxt0F ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.a99LEchbZF ++ cat /tmp/tmp.cAZLljxt0F ++ rm /tmp/tmp.a99LEchbZF /tmp/tmp.cAZLljxt0F ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TjQiNy1q7Q +++ mktemp ++ local LAST_ERR=/tmp/tmp.fkzKZhw5wo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.TjQiNy1q7Q ++ cat /tmp/tmp.fkzKZhw5wo ++ rm /tmp/tmp.TjQiNy1q7Q /tmp/tmp.fkzKZhw5wo ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-5449 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-5449 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xVZDh72ZgD +++ mktemp ++ local LAST_ERR=/tmp/tmp.gZcvPfrQ7G ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.xVZDh72ZgD ++ cat /tmp/tmp.gZcvPfrQ7G ++ rm /tmp/tmp.xVZDh72ZgD /tmp/tmp.gZcvPfrQ7G ++ return 0 + local client_container=psmdb-client-5dc94d5b48-vh8lp + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.HhlnzkjTWZ ++ mktemp + local LAST_ERR=/tmp/tmp.oyLkNgl4Bj + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.HhlnzkjTWZ + cat /tmp/tmp.oyLkNgl4Bj + rm /tmp/tmp.HhlnzkjTWZ /tmp/tmp.oyLkNgl4Bj + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.MJN1C1m0Iw/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-5449 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongos 'use myApp1\n db.test.find()' 
myApp:myPass@some-name-mongos.demand-backup-sharded-5449 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.xWYcPCbnHN +++ mktemp ++ local LAST_ERR=/tmp/tmp.WWAmoLBR3g ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.xWYcPCbnHN ++ cat /tmp/tmp.WWAmoLBR3g ++ rm /tmp/tmp.xWYcPCbnHN /tmp/tmp.WWAmoLBR3g ++ return 0 + local client_container=psmdb-client-5dc94d5b48-vh8lp + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.cRRYRQyxnt ++ mktemp + local LAST_ERR=/tmp/tmp.eOMZIvXnkV + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.cRRYRQyxnt + cat /tmp/tmp.eOMZIvXnkV + rm /tmp/tmp.cRRYRQyxnt /tmp/tmp.eOMZIvXnkV + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.MJN1C1m0Iw/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-5449 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-5449 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sYDvJYlIfe +++ mktemp ++ local LAST_ERR=/tmp/tmp.6JW75uvuhZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.sYDvJYlIfe ++ cat /tmp/tmp.6JW75uvuhZ ++ rm /tmp/tmp.sYDvJYlIfe /tmp/tmp.6JW75uvuhZ ++ return 0 + local client_container=psmdb-client-5dc94d5b48-vh8lp + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.TmiswzKPF9 ++ mktemp + local LAST_ERR=/tmp/tmp.nPPxfdLSZs + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.TmiswzKPF9 + cat /tmp/tmp.nPPxfdLSZs + rm /tmp/tmp.TmiswzKPF9 /tmp/tmp.nPPxfdLSZs + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.MJN1C1m0Iw/find2 + desc 'check backup and restore -- azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- azure-blob ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("dffdaa39-c078-4204-8af7-8780b5958433") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("6081b1eb-b495-4acd-97fb-3a96c069b4e4") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("a3eb413a-2ced-4d3d-ab4a-8b965a30eff9") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-azure-blob created waiting psmdb-restore/backup-azure-blob to reach ready state............ + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KjT3RBqpCj +++ mktemp ++ local LAST_ERR=/tmp/tmp.niKG9amHyN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.KjT3RBqpCj ++ cat /tmp/tmp.niKG9amHyN ++ rm /tmp/tmp.KjT3RBqpCj /tmp/tmp.niKG9amHyN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2Y9y9tWQQR +++ mktemp ++ local LAST_ERR=/tmp/tmp.PdjUKWzwBe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.2Y9y9tWQQR ++ cat /tmp/tmp.PdjUKWzwBe ++ rm /tmp/tmp.2Y9y9tWQQR /tmp/tmp.PdjUKWzwBe ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x13s2o7qoj +++ mktemp ++ local LAST_ERR=/tmp/tmp.eHxtumOtS6 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.x13s2o7qoj ++ cat /tmp/tmp.eHxtumOtS6 ++ rm /tmp/tmp.x13s2o7qoj /tmp/tmp.eHxtumOtS6 ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oF6PTLbuee +++ mktemp ++ local LAST_ERR=/tmp/tmp.gGLQDb9nGs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.oF6PTLbuee ++ cat /tmp/tmp.gGLQDb9nGs ++ rm /tmp/tmp.oF6PTLbuee /tmp/tmp.gGLQDb9nGs ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZG2eCJ0lXU +++ mktemp ++ local LAST_ERR=/tmp/tmp.WyKCmysvKZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.ZG2eCJ0lXU ++ cat /tmp/tmp.WyKCmysvKZ ++ rm /tmp/tmp.ZG2eCJ0lXU /tmp/tmp.WyKCmysvKZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ORsKccAH7I +++ mktemp ++ local LAST_ERR=/tmp/tmp.L2mnEnEpDu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.ORsKccAH7I ++ cat /tmp/tmp.L2mnEnEpDu ++ rm /tmp/tmp.ORsKccAH7I /tmp/tmp.L2mnEnEpDu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b9a1FDeE0Q +++ mktemp ++ local LAST_ERR=/tmp/tmp.UUP5Sm8L7z ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.b9a1FDeE0Q ++ cat /tmp/tmp.UUP5Sm8L7z ++ rm /tmp/tmp.b9a1FDeE0Q /tmp/tmp.UUP5Sm8L7z ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qwayReWD5N +++ mktemp ++ local LAST_ERR=/tmp/tmp.w72cItzdym ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.qwayReWD5N ++ cat /tmp/tmp.w72cItzdym ++ rm /tmp/tmp.qwayReWD5N /tmp/tmp.w72cItzdym ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-5449 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-5449 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ctn67IaErR +++ mktemp ++ local LAST_ERR=/tmp/tmp.CnmFH3mITs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.ctn67IaErR ++ cat /tmp/tmp.CnmFH3mITs ++ rm /tmp/tmp.ctn67IaErR /tmp/tmp.CnmFH3mITs ++ return 0 + local client_container=psmdb-client-5dc94d5b48-vh8lp + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.1BYBGbkBD3 ++ mktemp + local LAST_ERR=/tmp/tmp.EHxLB5QHON + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.1BYBGbkBD3 + cat /tmp/tmp.EHxLB5QHON + rm /tmp/tmp.1BYBGbkBD3 /tmp/tmp.EHxLB5QHON + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.MJN1C1m0Iw/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-5449 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-5449 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local 
driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MmQXhI6xYR +++ mktemp ++ local LAST_ERR=/tmp/tmp.YHkzc9B9Qh ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.MmQXhI6xYR ++ cat /tmp/tmp.YHkzc9B9Qh ++ rm /tmp/tmp.MmQXhI6xYR /tmp/tmp.YHkzc9B9Qh ++ return 0 + local client_container=psmdb-client-5dc94d5b48-vh8lp + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.hCZoKWObsH ++ mktemp + local LAST_ERR=/tmp/tmp.JWLnqKz1p9 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.hCZoKWObsH + cat /tmp/tmp.JWLnqKz1p9 + rm /tmp/tmp.hCZoKWObsH /tmp/tmp.JWLnqKz1p9 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.MJN1C1m0Iw/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-5449 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-5449 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.akUCEv4ddb +++ mktemp ++ local LAST_ERR=/tmp/tmp.5aJKfMC103 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.akUCEv4ddb ++ cat /tmp/tmp.5aJKfMC103 ++ rm /tmp/tmp.akUCEv4ddb /tmp/tmp.5aJKfMC103 ++ return 0 + local client_container=psmdb-client-5dc94d5b48-vh8lp + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.PjkD8LkiOx ++ mktemp + local LAST_ERR=/tmp/tmp.JLSh7oLJPp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.PjkD8LkiOx + cat /tmp/tmp.JLSh7oLJPp + rm /tmp/tmp.PjkD8LkiOx /tmp/tmp.JLSh7oLJPp + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.MJN1C1m0Iw/find2 + desc 'check backup and restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- minio ----------------------------------------------------------------------------------- 2024-05-06 09:03:01 55 myApp.test.gz Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("981b828c-691f-44d5-8fd4-2f0a6744cd8e") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("223860cb-2cad-48cd-81ee-524e385adadf") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-5449.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("31da59c1-1ea9-4640-880a-aed8c3cdaef3") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio created waiting psmdb-restore/backup-minio to reach ready state............ + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6311J2X69w +++ mktemp ++ local LAST_ERR=/tmp/tmp.P3Jidu09TK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.6311J2X69w ++ cat /tmp/tmp.P3Jidu09TK ++ rm /tmp/tmp.6311J2X69w /tmp/tmp.P3Jidu09TK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
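The lone "2024-05-06 09:03:01 55 myApp.test.gz" line at the top of the minio section looks like an object listing taken straight from the bucket before the restore, presumably to confirm the dump actually landed in MinIO. A hedged way to reproduce that listing with the same aws-cli-in-a-pod trick; the credentials and the key prefix under operator-testing are assumptions, the log only shows the file name:

kubectl run aws-cli --rm -i --restart=Never \
    --namespace demand-backup-sharded-5449 \
    --image=amazon/aws-cli \
    --env=AWS_ACCESS_KEY_ID=some-access-key \
    --env=AWS_SECRET_ACCESS_KEY=some-secret-key \
    -- --endpoint-url http://minio-service:9000 s3 ls --recursive s3://operator-testing/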
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VvGjrm7F11 +++ mktemp ++ local LAST_ERR=/tmp/tmp.79LVU5nDUV ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.VvGjrm7F11 ++ cat /tmp/tmp.79LVU5nDUV ++ rm /tmp/tmp.VvGjrm7F11 /tmp/tmp.79LVU5nDUV ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aE0ekZfU6K +++ mktemp ++ local LAST_ERR=/tmp/tmp.iKmOdWNyng ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.aE0ekZfU6K ++ cat /tmp/tmp.iKmOdWNyng ++ rm /tmp/tmp.aE0ekZfU6K /tmp/tmp.iKmOdWNyng ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CGnZgbvrwS +++ mktemp ++ local LAST_ERR=/tmp/tmp.vXSn2eFAaz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.CGnZgbvrwS ++ cat /tmp/tmp.vXSn2eFAaz ++ rm /tmp/tmp.CGnZgbvrwS /tmp/tmp.vXSn2eFAaz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h4eKJhKtJO +++ mktemp ++ local LAST_ERR=/tmp/tmp.2fdtr94aRJ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.h4eKJhKtJO ++ cat /tmp/tmp.2fdtr94aRJ ++ rm /tmp/tmp.h4eKJhKtJO /tmp/tmp.2fdtr94aRJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BvW6WnOzzn +++ mktemp ++ local LAST_ERR=/tmp/tmp.oUe9trvgE0 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.BvW6WnOzzn ++ cat /tmp/tmp.oUe9trvgE0 ++ rm /tmp/tmp.BvW6WnOzzn /tmp/tmp.oUe9trvgE0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C4cXF62oRY +++ mktemp ++ local LAST_ERR=/tmp/tmp.Dh1LcJGzZ4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.C4cXF62oRY ++ cat /tmp/tmp.Dh1LcJGzZ4 ++ rm /tmp/tmp.C4cXF62oRY /tmp/tmp.Dh1LcJGzZ4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
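The dots printed here come from a readiness loop that polls the PerconaServerMongoDB custom resource's .status.state, sleeping 10 seconds between probes and giving up after a fixed number of retries. A hedged reconstruction of that loop, with the function name and defaults inferred from the trace rather than copied from the suite, could look like this:

    #!/bin/bash
    # Poll a psmdb resource until .status.state reports "ready", or time out.
    wait_cluster_ready() {
        local cluster="$1"
        local max_retries="${2:-32}"
        local retry=0 state
        echo -n "waiting for cluster readiness"
        while true; do
            state=$(kubectl get psmdb "$cluster" -o 'jsonpath={.status.state}')
            if [ "$state" = "ready" ]; then
                echo " ok"
                return 0
            fi
            retry=$((retry + 1))
            if [ "$retry" -ge "$max_retries" ]; then
                echo " timed out (last state: $state)" >&2
                return 1
            fi
            echo -n .
            sleep 10
        done
    }

    # wait_cluster_ready some-name 32

The transient "error" and "initializing" states seen in the probes above are expected while the restore is being applied and the replica sets reconfigure; only a sustained non-ready state until the retry limit would fail the wait.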
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NlBME2byVT +++ mktemp ++ local LAST_ERR=/tmp/tmp.C19rZU1b4S ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.NlBME2byVT ++ cat /tmp/tmp.C19rZU1b4S ++ rm /tmp/tmp.NlBME2byVT /tmp/tmp.C19rZU1b4S ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-5449 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-5449 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tx6R3OMbNv +++ mktemp ++ local LAST_ERR=/tmp/tmp.DLAZ5MWP3g ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.tx6R3OMbNv ++ cat /tmp/tmp.DLAZ5MWP3g ++ rm /tmp/tmp.tx6R3OMbNv /tmp/tmp.DLAZ5MWP3g ++ return 0 + local client_container=psmdb-client-5dc94d5b48-vh8lp + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.X4UWIkKV6b ++ mktemp + local LAST_ERR=/tmp/tmp.igdHNsy7kn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.X4UWIkKV6b + cat /tmp/tmp.igdHNsy7kn + rm /tmp/tmp.X4UWIkKV6b /tmp/tmp.igdHNsy7kn + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.MJN1C1m0Iw/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-5449 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-5449 mongodb .svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach 
primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp1\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.T8WISeOary +++ mktemp ++ local LAST_ERR=/tmp/tmp.rHTt5mSAef ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.T8WISeOary ++ cat /tmp/tmp.rHTt5mSAef ++ rm /tmp/tmp.T8WISeOary /tmp/tmp.rHTt5mSAef ++ return 0 + local client_container=psmdb-client-5dc94d5b48-vh8lp + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.cMIuM4jwJD ++ mktemp + local LAST_ERR=/tmp/tmp.VDpo0I052P + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.cMIuM4jwJD + cat /tmp/tmp.VDpo0I052P + rm /tmp/tmp.cMIuM4jwJD /tmp/tmp.VDpo0I052P + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.MJN1C1m0Iw/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-5449 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-5449 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-5449 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0vJDA29D7d +++ mktemp ++ local LAST_ERR=/tmp/tmp.w7dfSbRhfi ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.0vJDA29D7d ++ cat /tmp/tmp.w7dfSbRhfi ++ rm /tmp/tmp.0vJDA29D7d /tmp/tmp.w7dfSbRhfi ++ return 0 + local client_container=psmdb-client-5dc94d5b48-vh8lp + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.43hJxHRTKV ++ mktemp + local LAST_ERR=/tmp/tmp.cHwTWNbAqA + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-vh8lp -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-5449.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.43hJxHRTKV + cat /tmp/tmp.cHwTWNbAqA + rm /tmp/tmp.43hJxHRTKV /tmp/tmp.cHwTWNbAqA + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1542/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.MJN1C1m0Iw/find2 + desc 'delete backup and check if it is removed from bucket -- minio' + set +o xtrace ----------------------------------------------------------------------------------- delete backup and check if it is removed from bucket -- minio ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs" deleted perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted ----------------------------------------------------------------------------------- check for passwords leak ----------------------------------------------------------------------------------- secrets=YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 passwords=backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 pods=minio-service-57dd49b-pbrpn psmdb-client-5dc94d5b48-vh8lp some-name-cfg-0 some-name-cfg-1 some-name-cfg-2 some-name-mongos-0 some-name-mongos-1 some-name-mongos-2 some-name-rs0-0 some-name-rs0-1 some-name-rs0-2 some-name-rs1-0 some-name-rs1-1 some-name-rs1-2 some-name-rs2-0 some-name-rs2-1 some-name-rs2-2 logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-minio-service-57dd49b-pbrpn-minio.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-psmdb-client-5dc94d5b48-vh8lp-psmdb-client.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-cfg-0-mongod.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-cfg-0-cfg-sidecar-1.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-cfg-0-backup-agent.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-cfg-1-mongod.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-cfg-1-cfg-sidecar-1.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-cfg-1-backup-agent.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-cfg-2-mongod.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-cfg-2-cfg-sidecar-1.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-cfg-2-backup-agent.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-mongos-0-mongos.txt logs saved in: 
/tmp/tmp.MJN1C1m0Iw/logs_output-some-name-mongos-0-mongos-sidecar-1.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-mongos-1-mongos.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-mongos-1-mongos-sidecar-1.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-mongos-2-mongos.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-mongos-2-mongos-sidecar-1.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs0-0-mongod.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs0-0-backup-agent.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs0-1-mongod.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs0-1-backup-agent.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs0-2-mongod.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs0-2-backup-agent.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs1-0-mongod.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs1-0-backup-agent.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs1-1-mongod.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs1-1-backup-agent.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs1-2-mongod.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs1-2-backup-agent.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs2-0-mongod.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs2-0-rs-sidecar-1.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs2-0-backup-agent.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs2-1-mongod.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs2-1-rs-sidecar-1.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs2-1-backup-agent.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs2-2-mongod.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs2-2-rs-sidecar-1.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-some-name-rs2-2-backup-agent.txt logs saved in: /tmp/tmp.MJN1C1m0Iw/logs_output-percona-server-mongodb-operator-fd9c6c884-sp8x6-percona-server-mongodb-operator.txt runtimeclass.node.k8s.io "container-rc" deleted ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' E0506 09:17:08.185760 29705 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1-10-0: the server could not find the requested resource E0506 
09:17:08.186676 29705 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1-12-0: the server could not find the requested resource E0506 09:17:08.190035 29705 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1-11-0: the server could not find the requested resource E0506 09:17:08.191023 29705 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1: the server could not find the requested resource error: the server doesn't have a resource type "perconaservermongodbrestores" error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted
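The teardown attempts to clear finalizers on any leftover PSMDB custom resources before deleting the CRDs, so that stuck objects cannot block deletion; the repeated "the server doesn't have a resource type" and memcache.go discovery errors simply mean the CRDs were already removed by the time the patches ran. A rough sketch of that best-effort cleanup step is below; the namespace iteration and error handling are assumptions for illustration, not the suite's exact commands.

    #!/bin/bash
    # Best-effort cleanup: strip finalizers from leftover PSMDB objects, then drop the CRDs.
    for crd in perconaservermongodbbackups.psmdb.percona.com \
               perconaservermongodbrestores.psmdb.percona.com \
               perconaservermongodbs.psmdb.percona.com; do
        # List remaining objects of this type across all namespaces; ignore errors
        # if the CRD has already been deleted.
        kubectl get "$crd" --all-namespaces \
            -o jsonpath='{range .items[*]}{.metadata.namespace}{" "}{.metadata.name}{"\n"}{end}' 2>/dev/null \
        | while read -r ns name; do
            # Removing finalizers lets the API server garbage-collect the object
            # even though the operator that would normally handle them is gone.
            kubectl patch "$crd" "$name" -n "$ns" --type=merge \
                -p '{"metadata":{"finalizers":[]}}' || true
        done
        kubectl delete crd "$crd" --ignore-not-found
    done

With the custom resources and CRDs gone, the remaining RBAC objects (the clusterrole and clusterrolebinding deleted at the end of the log) can be removed cleanly, leaving the cluster ready for the next test run.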