++ echo 'Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1559/e2e-tests/logs/demand-backup-sharded.log'
Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1559/e2e-tests/logs/demand-backup-sharded.log
++ '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1559/e2e-tests/conf/cloud-secret.yml ']'
++ SKIP_BACKUPS_TO_AWS_GCP_AZURE=
++ oc get projects
++ grep '^minikube'
++ kubectl get nodes
+++ kubectl version -o json
+++ jq -r .serverVersion.gitVersion
+++ grep '\-eks\-'
WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1
++ '[' ']'
++ EKS=0
+++ kubectl version -o json
+++ jq -r .serverVersion.gitVersion
+++ grep gke
WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1
++ '[' v1.26.15-gke.1381000 ']'
++ GKE=1
+++ kubectl version -o json
+++ jq -r '.serverVersion.major + "." + .serverVersion.minor'
+++ /usr/bin/sed -r 's/[^0-9.]+//g'
WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1
++ KUBE_VERSION=1.26
+ set_debug
+ [[ 0 == 1 ]]
+ set +o xtrace
-----------------------------------------------------------------------------------
get and delete old CRDs and RBAC
-----------------------------------------------------------------------------------
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbbackups"
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbrestores"
error: the server doesn't have a resource type "perconaservermongodbs"
+ kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbs"
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found
-----------------------------------------------------------------------------------
destroy chaos-mesh
-----------------------------------------------------------------------------------
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
cleaned up old namespaces psmdb-operator
-----------------------------------------------------------------------------------
error: resource(s) were provided, but no name was specified
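For reference, the "get and delete old CRDs and RBAC" step above amounts to clearing finalizers on any leftover PSMDB custom resources and then dropping the operator CRDs; the errors simply mean nothing was left to delete on this cluster. A minimal sketch of that cleanup (an approximation of the e2e helper, not the exact script):

# Sketch: for each operator CRD, clear finalizers on leftover objects so deletion
# cannot hang, then remove the CRD itself. Harmless on an already-clean cluster.
for crd in perconaservermongodbbackups perconaservermongodbrestores perconaservermongodbs; do
    kubectl get "${crd}.psmdb.percona.com" --all-namespaces \
        -o jsonpath='{range .items[*]}{.metadata.namespace}{" "}{.metadata.name}{"\n"}{end}' 2>/dev/null |
    while read -r ns name; do
        kubectl patch "${crd}.psmdb.percona.com" "$name" -n "$ns" \
            --type=merge -p '{"metadata":{"finalizers":[]}}'
    done
    kubectl delete crd "${crd}.psmdb.percona.com" --ignore-not-found
done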
-----------------------------------------------------------------------------------
create namespace psmdb-operator
-----------------------------------------------------------------------------------
namespace/psmdb-operator created
Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1559-6384b519-2-cluster2" modified.
-----------------------------------------------------------------------------------
start PSMDB operator
-----------------------------------------------------------------------------------
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied
clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created
serviceaccount/percona-server-mongodb-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created
deployment.apps/percona-server-mongodb-operator created
waiting for pod/percona-server-mongodb-operator-54f884cbc-28rjc to be ready.OK
-----------------------------------------------------------------------------------
destroy chaos-mesh
-----------------------------------------------------------------------------------
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
error: resource(s) were provided, but no name was specified
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
cleaned up old namespaces demand-backup-sharded-18981
-----------------------------------------------------------------------------------
error: resource(s) were provided, but no name was specified
-----------------------------------------------------------------------------------
create namespace demand-backup-sharded-18981
-----------------------------------------------------------------------------------
namespace/demand-backup-sharded-18981 created
Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1559-6384b519-2-cluster2" modified.
-----------------------------------------------------------------------------------
install Minio
-----------------------------------------------------------------------------------
Error: uninstall: Release not loaded: minio-service: release: not found
"minio" has been removed from your repositories
"minio" has been added to your repositories
NAME: minio-service
LAST DEPLOYED: Tue Jun 4 14:37:26 2024
NAMESPACE: demand-backup-sharded-18981
STATUS: deployed
REVISION: 1
TEST SUITE: None
NOTES:
MinIO can be accessed via port 9000 on the following DNS name from within your cluster:
minio-service.demand-backup-sharded-18981.svc.cluster.local
To access MinIO from localhost, run the below commands:
1. export POD_NAME=$(kubectl get pods --namespace demand-backup-sharded-18981 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}")
2. kubectl port-forward $POD_NAME 9000 --namespace demand-backup-sharded-18981
Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/
You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client:
1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart
2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace demand-backup-sharded-18981 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace demand-backup-sharded-18981 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000
3. mc ls minio-service-local
waiting for pod/minio-service-57dd49b-hgf4t to be ready.OK
service/minio-service created
make_bucket: operator-testing
pod "aws-cli" deleted
If you don't see a command prompt, try pressing enter.
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: error attaching to container: container is in CONTAINER_EXITED state
-----------------------------------------------------------------------------------
create first PSMDB cluster
-----------------------------------------------------------------------------------
secret/some-users created
deployment.apps/psmdb-client created
-----------------------------------------------------------------------------------
create secrets for cloud storages
-----------------------------------------------------------------------------------
secret/minio-secret created
secret/aws-s3-secret created
secret/gcp-cs-secret created
secret/azure-secret created
runtimeclass.node.k8s.io/container-rc unchanged
perconaservermongodb.psmdb.percona.com/some-name created
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
waiting for pod/some-name-rs0-0 to be ready.........OK
waiting for pod/some-name-rs0-1 to be ready..........OK
waiting for pod/some-name-rs0-2 to be ready.....OK
Waiting for cluster readyness..................................................
waiting for pod/some-name-cfg-0 to be ready.OK
waiting for pod/some-name-cfg-1 to be ready.OK
waiting for pod/some-name-cfg-2 to be ready.OK
waiting for pod/some-name-mongos-0 to be ready.OK
waiting for pod/some-name-mongos-1 to be ready.OK
waiting for pod/some-name-mongos-2 to be ready.OK
Waiting for cluster readyness
-----------------------------------------------------------------------------------
check if service and statefulset created with expected config
-----------------------------------------------------------------------------------
secret/some-name-mongos created
waiting for pod/some-name-mongos-0 to be ready.OK
waiting for pod/some-name-mongos-1 to be ready.OK
waiting for pod/some-name-mongos-2 to be ready.OK
Waiting for cluster readyness.............................................................................................
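The repeated "waiting for pod/... to be ready" and "Waiting for cluster readyness" lines above come from polling helpers in the test framework, which print a dot per attempt. A rough equivalent of the pod wait, assuming kubectl's built-in readiness condition is an acceptable stand-in for the real helper (pod and namespace names are taken from this run and are illustrative):

# Sketch: block until a pod reports the Ready condition or the timeout expires.
wait_pod_ready() {
    local pod="$1" ns="$2" timeout="${3:-360}"
    kubectl wait --for=condition=Ready "pod/${pod}" -n "${ns}" --timeout="${timeout}s"
}

wait_pod_ready some-name-mongos-0 demand-backup-sharded-18981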
----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("493c6f90-af5a-40e0-8b3a-693c91c841d3") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" }, { "db" : "myApp1", "role" : "readWrite" }, { "db" : "myApp2", "role" : "readWrite" } ] } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("35d68160-e3d2-4447-952b-91a30d2c41dd") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1717512374, 8), "signature" : { "hash" : BinData(0,"cbaJDGqhOTgroi9mBJaHw7lbwN4="), "keyId" : NumberLong("7376657754523435017") } }, "operationTime" : Timestamp(1717512374, 2) } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("0df9bac6-e3dd-4c23-9e45-6abd6a354bd4") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1717512377, 13), "signature" : { "hash" : BinData(0,"t35Mah+EO1Y2qw6at/zzZO3IwH0="), "keyId" : NumberLong("7376657754523435017") } }, "operationTime" : Timestamp(1717512377, 8) } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("30f78c03-2f41-465b-a331-1c722a878769") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1717512381, 9), "signature" : { "hash" : BinData(0,"w1rSo6kD9nCIRkTBK4qUB3fvswg="), "keyId" : NumberLong("7376657754523435017") } }, "operationTime" : Timestamp(1717512381, 4) } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("3efd28e1-98f9-4d1c-94f0-2e68cd093498") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("5c3ebe7b-5228-4367-911d-8d77dee82655") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version 
v4.4.29-28
connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb
Implicit session: session { "id" : UUID("02f90bb8-f81c-4121-a01a-40cc39ae5a1e") }
Percona Server for MongoDB server version: v7.0.11-6
WARNING: shell and server versions do not match
switched to db myApp2
WriteResult({ "nInserted" : 1 })
bye
some-name-rs0-0
some-name-rs0-1
some-name-rs0-2
some-name-rs1-0
some-name-rs1-1
some-name-rs1-2
some-name-rs2-0
some-name-rs2-1
some-name-rs2-2
-----------------------------------------------------------------------------------
run backups
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
run backup backup-minio
-----------------------------------------------------------------------------------
perconaservermongodbbackup.psmdb.percona.com/backup-minio created
-----------------------------------------------------------------------------------
run backup backup-aws-s3
-----------------------------------------------------------------------------------
perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created
-----------------------------------------------------------------------------------
run backup backup-gcp-cs
-----------------------------------------------------------------------------------
perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs created
-----------------------------------------------------------------------------------
run backup backup-azure-blob
-----------------------------------------------------------------------------------
perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created
backup-aws-s3.......................................
backup-gcp-cs...................
backup-azure-blob.......................................
backup-minio.
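Each "run backup <name>" step above applies an on-demand PerconaServerMongoDBBackup resource and then waits for it to reach the ready state; the trailing dots after backup-aws-s3, backup-gcp-cs, backup-azure-blob, and backup-minio are that wait. A representative manifest, assuming the storages are already defined under spec.backup.storages in the cluster CR and that field names follow the operator's documented schema for recent versions (treat names and the storage key as illustrative):

# Sketch: request an on-demand backup of cluster "some-name" to the "minio" storage.
kubectl apply -f - <<EOF
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBBackup
metadata:
  name: backup-minio
spec:
  clusterName: some-name
  storageName: minio
EOF

# Poll the backup state until it reports "ready" (what the dotted wait above does).
kubectl get psmdb-backup backup-minio -o jsonpath='{.status.state}'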
----------------------------------------------------------------------------------- check backup and restore -- aws-s3 ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("606a7400-eff6-476b-b0fc-5f396467b05a") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("dfcff2b0-8008-407e-92c4-c16187515c83") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("9b5bf992-f8e3-4cf0-8c28-e636e9269f68") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-aws-s3 created waiting psmdb-restore/backup-aws-s3 to reach ready state................. + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5a7fdi0lWY +++ mktemp ++ local LAST_ERR=/tmp/tmp.KQz4NGlIIM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.5a7fdi0lWY ++ cat /tmp/tmp.KQz4NGlIIM ++ rm /tmp/tmp.5a7fdi0lWY /tmp/tmp.KQz4NGlIIM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kB3gcZyMuA +++ mktemp ++ local LAST_ERR=/tmp/tmp.xerbIhvJSY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.kB3gcZyMuA ++ cat /tmp/tmp.xerbIhvJSY ++ rm /tmp/tmp.kB3gcZyMuA /tmp/tmp.xerbIhvJSY ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wh9Qy1Wjvx +++ mktemp ++ local LAST_ERR=/tmp/tmp.lcx8s2gVfT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.wh9Qy1Wjvx ++ cat /tmp/tmp.lcx8s2gVfT ++ rm /tmp/tmp.wh9Qy1Wjvx /tmp/tmp.lcx8s2gVfT ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.K7fl8T0Ydm +++ mktemp ++ local LAST_ERR=/tmp/tmp.iDHjb1WNDm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.K7fl8T0Ydm ++ cat /tmp/tmp.iDHjb1WNDm ++ rm /tmp/tmp.K7fl8T0Ydm /tmp/tmp.iDHjb1WNDm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xAtol7rSEM +++ mktemp ++ local LAST_ERR=/tmp/tmp.AZ8fMmgE6X ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.xAtol7rSEM ++ cat /tmp/tmp.AZ8fMmgE6X ++ rm /tmp/tmp.xAtol7rSEM /tmp/tmp.AZ8fMmgE6X ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SWMeuuEIQ9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.mBPWn0TFhz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.SWMeuuEIQ9 ++ cat /tmp/tmp.mBPWn0TFhz ++ rm /tmp/tmp.SWMeuuEIQ9 /tmp/tmp.mBPWn0TFhz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vNK5xYqd7k +++ mktemp ++ local LAST_ERR=/tmp/tmp.mMK6qcUeXr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.vNK5xYqd7k ++ cat /tmp/tmp.mMK6qcUeXr ++ rm /tmp/tmp.vNK5xYqd7k /tmp/tmp.mMK6qcUeXr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r8V6dxZIY6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.bPsy9Csinu ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.r8V6dxZIY6 ++ cat /tmp/tmp.bPsy9Csinu ++ rm /tmp/tmp.r8V6dxZIY6 /tmp/tmp.bPsy9Csinu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1qXu9cj2eB +++ mktemp ++ local LAST_ERR=/tmp/tmp.7q42ZzdsQr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.1qXu9cj2eB ++ cat /tmp/tmp.7q42ZzdsQr ++ rm /tmp/tmp.1qXu9cj2eB /tmp/tmp.7q42ZzdsQr ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-18981 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-18981 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZPl4yBz6oR +++ mktemp ++ local LAST_ERR=/tmp/tmp.nBgITYOdVY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.ZPl4yBz6oR ++ cat /tmp/tmp.nBgITYOdVY ++ rm /tmp/tmp.ZPl4yBz6oR /tmp/tmp.nBgITYOdVY ++ return 0 + local client_container=psmdb-client-7469665986-w5rqr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.heUm2A2h1S ++ mktemp + local LAST_ERR=/tmp/tmp.sCqiRZdMo2 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.heUm2A2h1S + cat /tmp/tmp.sCqiRZdMo2 + rm /tmp/tmp.heUm2A2h1S /tmp/tmp.sCqiRZdMo2 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1559/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.nszFGcQl8i/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-18981 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-18981 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local 
uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AG1v5UkiEm +++ mktemp ++ local LAST_ERR=/tmp/tmp.2i88TyQ8t7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.AG1v5UkiEm ++ cat /tmp/tmp.2i88TyQ8t7 ++ rm /tmp/tmp.AG1v5UkiEm /tmp/tmp.2i88TyQ8t7 ++ return 0 + local client_container=psmdb-client-7469665986-w5rqr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.wszNsmvn8d ++ mktemp + local LAST_ERR=/tmp/tmp.sSYrEYSnqr + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.wszNsmvn8d + cat /tmp/tmp.sSYrEYSnqr + rm /tmp/tmp.wszNsmvn8d /tmp/tmp.sSYrEYSnqr + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1559/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.nszFGcQl8i/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-18981 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-18981 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PpA9euATD3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.UbEF022QrZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.PpA9euATD3 ++ cat /tmp/tmp.UbEF022QrZ ++ rm /tmp/tmp.PpA9euATD3 /tmp/tmp.UbEF022QrZ ++ return 0 + local client_container=psmdb-client-7469665986-w5rqr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.d49AUl3JTh ++ mktemp + local LAST_ERR=/tmp/tmp.GE3CgVGQl8 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.d49AUl3JTh + cat /tmp/tmp.GE3CgVGQl8 + rm /tmp/tmp.d49AUl3JTh /tmp/tmp.GE3CgVGQl8 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1559/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.nszFGcQl8i/find2 + desc 'check backup and restore -- gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- gcp-cs ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("898f7d9c-992a-4afb-9a05-1df819647b6a") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("afbad42c-7c98-440d-8812-fa80a49a2aab") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("da68b693-3614-40fa-8f27-446f0042b200") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-gcp-cs created waiting psmdb-restore/backup-gcp-cs to reach ready state............ + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Sr0auqNfru +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZUDmrLUoLf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.Sr0auqNfru ++ cat /tmp/tmp.ZUDmrLUoLf ++ rm /tmp/tmp.Sr0auqNfru /tmp/tmp.ZUDmrLUoLf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RjYw5yEji9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.91doHdK8UT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.RjYw5yEji9 ++ cat /tmp/tmp.91doHdK8UT ++ rm /tmp/tmp.RjYw5yEji9 /tmp/tmp.91doHdK8UT ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9jnnFngDzZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.XfmyOJZuIv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.9jnnFngDzZ ++ cat /tmp/tmp.XfmyOJZuIv ++ rm /tmp/tmp.9jnnFngDzZ /tmp/tmp.XfmyOJZuIv ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hJ6KpwkdXH +++ mktemp ++ local LAST_ERR=/tmp/tmp.CWuhuwBpUa ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.hJ6KpwkdXH ++ cat /tmp/tmp.CWuhuwBpUa ++ rm /tmp/tmp.hJ6KpwkdXH /tmp/tmp.CWuhuwBpUa ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lHPOfR6A0N +++ mktemp ++ local LAST_ERR=/tmp/tmp.m2I8HqTBFq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.lHPOfR6A0N ++ cat /tmp/tmp.m2I8HqTBFq ++ rm /tmp/tmp.lHPOfR6A0N /tmp/tmp.m2I8HqTBFq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.stVKzcZKBW +++ mktemp ++ local LAST_ERR=/tmp/tmp.qZTDcizqdj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.stVKzcZKBW ++ cat /tmp/tmp.qZTDcizqdj ++ rm /tmp/tmp.stVKzcZKBW /tmp/tmp.qZTDcizqdj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iazFsxXsKF +++ mktemp ++ local LAST_ERR=/tmp/tmp.JkjrKLqSO4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.iazFsxXsKF ++ cat /tmp/tmp.JkjrKLqSO4 ++ rm /tmp/tmp.iazFsxXsKF /tmp/tmp.JkjrKLqSO4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qxu7QMamDB +++ mktemp ++ local LAST_ERR=/tmp/tmp.TdmuJ2uBlP ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.qxu7QMamDB ++ cat /tmp/tmp.TdmuJ2uBlP ++ rm /tmp/tmp.qxu7QMamDB /tmp/tmp.TdmuJ2uBlP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.s3w1AoUp2q +++ mktemp ++ local LAST_ERR=/tmp/tmp.FyDU0guhFK ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.s3w1AoUp2q ++ cat /tmp/tmp.FyDU0guhFK ++ rm /tmp/tmp.s3w1AoUp2q /tmp/tmp.FyDU0guhFK ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-18981 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-18981 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HCzImzvZtE +++ mktemp ++ local LAST_ERR=/tmp/tmp.WdsN6UvvOg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.HCzImzvZtE ++ cat /tmp/tmp.WdsN6UvvOg ++ rm /tmp/tmp.HCzImzvZtE /tmp/tmp.WdsN6UvvOg ++ return 0 + local client_container=psmdb-client-7469665986-w5rqr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.jLTPDfQUya ++ mktemp + local LAST_ERR=/tmp/tmp.PWXoKWzlMy + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.jLTPDfQUya + cat /tmp/tmp.PWXoKWzlMy + rm /tmp/tmp.jLTPDfQUya /tmp/tmp.PWXoKWzlMy + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1559/e2e-tests/demand-backup-sharded/compare/find.json 
/tmp/tmp.nszFGcQl8i/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-18981 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-18981 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3lr2VSQNgk +++ mktemp ++ local LAST_ERR=/tmp/tmp.PHwrVLkX8I ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.3lr2VSQNgk ++ cat /tmp/tmp.PHwrVLkX8I ++ rm /tmp/tmp.3lr2VSQNgk /tmp/tmp.PHwrVLkX8I ++ return 0 + local client_container=psmdb-client-7469665986-w5rqr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.q2HTyqWSLk ++ mktemp + local LAST_ERR=/tmp/tmp.zz5950UikU + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.q2HTyqWSLk + cat /tmp/tmp.zz5950UikU + rm /tmp/tmp.q2HTyqWSLk /tmp/tmp.zz5950UikU + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1559/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.nszFGcQl8i/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-18981 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-18981 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R8LOiamVKP +++ mktemp ++ local LAST_ERR=/tmp/tmp.h8CKMlwbIv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions 
do not match|Error saving history file:' ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.R8LOiamVKP ++ cat /tmp/tmp.h8CKMlwbIv ++ rm /tmp/tmp.R8LOiamVKP /tmp/tmp.h8CKMlwbIv ++ return 0 + local client_container=psmdb-client-7469665986-w5rqr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.gomIO5SwGd ++ mktemp + local LAST_ERR=/tmp/tmp.CRgz4SsKPG + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.gomIO5SwGd + cat /tmp/tmp.CRgz4SsKPG + rm /tmp/tmp.gomIO5SwGd /tmp/tmp.CRgz4SsKPG + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1559/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.nszFGcQl8i/find2 + desc 'check backup and restore -- azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- azure-blob ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("3b68fa52-ca37-407d-b44f-44415f6b62f2") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("715c7e3c-df56-4057-aa6e-b0456aa01628") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("92a04e06-f509-4cd0-8b96-9e434d1ca974") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-azure-blob created waiting psmdb-restore/backup-azure-blob to reach ready state............. 
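Each restore in this log (restore-backup-aws-s3, restore-backup-gcp-cs, restore-backup-azure-blob, and later restore-backup-minio) is driven by a PerconaServerMongoDBRestore resource that points at a finished backup. A representative manifest, with the same hedging as above about exact field names:

# Sketch: restore cluster "some-name" from the finished backup "backup-azure-blob".
kubectl apply -f - <<EOF
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-azure-blob
spec:
  clusterName: some-name
  backupName: backup-azure-blob
EOF

# The "waiting psmdb-restore/... to reach ready state" lines correspond to polling:
kubectl get psmdb-restore restore-backup-azure-blob -o jsonpath='{.status.state}'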
+ '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tqGJMqC2zJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.eqPr94Oa0Q ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.tqGJMqC2zJ ++ cat /tmp/tmp.eqPr94Oa0Q ++ rm /tmp/tmp.tqGJMqC2zJ /tmp/tmp.eqPr94Oa0Q ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FsT1Fo1Nzh +++ mktemp ++ local LAST_ERR=/tmp/tmp.Io0imW7ATr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.FsT1Fo1Nzh ++ cat /tmp/tmp.Io0imW7ATr ++ rm /tmp/tmp.FsT1Fo1Nzh /tmp/tmp.Io0imW7ATr ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tSHj5qUI8s +++ mktemp ++ local LAST_ERR=/tmp/tmp.mnfDMLnK2t ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.tSHj5qUI8s ++ cat /tmp/tmp.mnfDMLnK2t ++ rm /tmp/tmp.tSHj5qUI8s /tmp/tmp.mnfDMLnK2t ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vnXyUN2cQd +++ mktemp ++ local LAST_ERR=/tmp/tmp.76NuBbyXbZ ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.vnXyUN2cQd ++ cat /tmp/tmp.76NuBbyXbZ ++ rm /tmp/tmp.vnXyUN2cQd /tmp/tmp.76NuBbyXbZ ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.65vLT8rs4z +++ mktemp ++ local LAST_ERR=/tmp/tmp.BBfXBuTJMU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.65vLT8rs4z ++ cat /tmp/tmp.BBfXBuTJMU ++ rm /tmp/tmp.65vLT8rs4z /tmp/tmp.BBfXBuTJMU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.M6yVxgI8cQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.4vTqTwdyQS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.M6yVxgI8cQ ++ cat /tmp/tmp.4vTqTwdyQS ++ rm /tmp/tmp.M6yVxgI8cQ /tmp/tmp.4vTqTwdyQS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YJG12mO9ET +++ mktemp ++ local LAST_ERR=/tmp/tmp.rCcjdjKiJm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.YJG12mO9ET ++ cat /tmp/tmp.rCcjdjKiJm ++ rm /tmp/tmp.YJG12mO9ET /tmp/tmp.rCcjdjKiJm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z8GzW2oitX +++ mktemp ++ local LAST_ERR=/tmp/tmp.gW6OpGHKET ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.z8GzW2oitX ++ cat /tmp/tmp.gW6OpGHKET ++ rm /tmp/tmp.z8GzW2oitX /tmp/tmp.gW6OpGHKET ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8QENw9djHd +++ mktemp ++ local LAST_ERR=/tmp/tmp.uahz6Ilrys ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.8QENw9djHd ++ cat /tmp/tmp.uahz6Ilrys ++ rm /tmp/tmp.8QENw9djHd /tmp/tmp.uahz6Ilrys ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-18981 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-18981 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.MlPR1M1oJS +++ mktemp ++ local LAST_ERR=/tmp/tmp.8MYB7djSpx ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.MlPR1M1oJS ++ cat /tmp/tmp.8MYB7djSpx ++ rm /tmp/tmp.MlPR1M1oJS /tmp/tmp.8MYB7djSpx ++ return 0 + local client_container=psmdb-client-7469665986-w5rqr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.BWskQBwj7J ++ mktemp + local LAST_ERR=/tmp/tmp.eq7XF1Rjgn + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.BWskQBwj7J + cat /tmp/tmp.eq7XF1Rjgn + rm /tmp/tmp.BWskQBwj7J /tmp/tmp.eq7XF1Rjgn + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1559/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.nszFGcQl8i/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-18981 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-18981 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.M0iVHu5yGy +++ mktemp ++ local LAST_ERR=/tmp/tmp.3NCMxHdrbY ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.M0iVHu5yGy ++ cat /tmp/tmp.3NCMxHdrbY ++ rm /tmp/tmp.M0iVHu5yGy /tmp/tmp.3NCMxHdrbY ++ return 0 + local client_container=psmdb-client-7469665986-w5rqr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.jRq22UokZF ++ mktemp + local LAST_ERR=/tmp/tmp.XwJQrlThEK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.jRq22UokZF + cat /tmp/tmp.XwJQrlThEK + rm /tmp/tmp.jRq22UokZF /tmp/tmp.XwJQrlThEK + return 0 + diff 
/mnt/jenkins/workspace/cloud-psmdb-operator_PR-1559/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.nszFGcQl8i/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-18981 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-18981 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fkRYY3jyIW +++ mktemp ++ local LAST_ERR=/tmp/tmp.N3jhxo74ta ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.fkRYY3jyIW ++ cat /tmp/tmp.N3jhxo74ta ++ rm /tmp/tmp.fkRYY3jyIW /tmp/tmp.N3jhxo74ta ++ return 0 + local client_container=psmdb-client-7469665986-w5rqr + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.fSz8emDY8L ++ mktemp + local LAST_ERR=/tmp/tmp.6Ci2FI5Dlm + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.fSz8emDY8L + cat /tmp/tmp.6Ci2FI5Dlm + rm /tmp/tmp.fSz8emDY8L /tmp/tmp.6Ci2FI5Dlm + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1559/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.nszFGcQl8i/find2 + desc 'check backup and restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- minio ----------------------------------------------------------------------------------- If you don't see a command prompt, try pressing enter. 
+ desc 'check backup and restore -- minio'
+ set +o xtrace
-----------------------------------------------------------------------------------
check backup and restore -- minio
-----------------------------------------------------------------------------------
If you don't see a command prompt, try pressing enter.
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-sharded-18981
2024-06-04 14:47:16 55 myApp.test.gz
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb
Implicit session: session { "id" : UUID("67050037-33ac-4f6b-b74a-82f1d133d7bf") }
Percona Server for MongoDB server version: v7.0.11-6
WARNING: shell and server versions do not match
switched to db myApp
WriteResult({ "nInserted" : 1 })
bye
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb
Implicit session: session { "id" : UUID("47a46f39-4e87-4a81-84a0-9867e2072806") }
Percona Server for MongoDB server version: v7.0.11-6
WARNING: shell and server versions do not match
switched to db myApp1
WriteResult({ "nInserted" : 1 })
bye
Percona Server for MongoDB shell version v4.4.29-28
connecting to: mongodb://some-name-mongos.demand-backup-sharded-18981.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb
Implicit session: session { "id" : UUID("1e54053d-49b4-4d7a-9672-3a7eb9cc2662") }
Percona Server for MongoDB server version: v7.0.11-6
WARNING: shell and server versions do not match
switched to db myApp2
WriteResult({ "nInserted" : 1 })
bye
perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio created
waiting psmdb-restore/backup-minio to reach ready state............
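The restore step applies a PerconaServerMongoDBRestore object that points at the minio backup and then polls its status until the "waiting psmdb-restore/... to reach ready state" loop above completes. A hedged sketch of that step; the spec fields and the polling helper are assumptions based on the visible messages, not the suite's exact code:

# create the restore object for the minio backup (spec fields are illustrative)
cat <<EOF | kubectl_bin apply -f -
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-minio
spec:
  clusterName: some-name
  backupName: backup-minio
EOF

# poll the restore state until it becomes ready
echo -n 'waiting psmdb-restore/backup-minio to reach ready state'
until [ "$(kubectl_bin get psmdb-restore restore-backup-minio -o 'jsonpath={.status.state}')" == "ready" ]; do
	echo -n .
	sleep 1
done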
+ '[' 1 -eq 1 ']'
+ wait_cluster_consistency some-name
+ local cluster_name=some-name
+ local wait_time=32
+ retry=0
+ sleep 7
+ echo -n 'waiting for cluster readyness'
waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.OP6OLWv0K5
+++ mktemp
++ local LAST_ERR=/tmp/tmp.TlUQ0ZWhuv
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.OP6OLWv0K5
++ cat /tmp/tmp.TlUQ0ZWhuv
++ rm /tmp/tmp.OP6OLWv0K5 /tmp/tmp.TlUQ0ZWhuv
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 1 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.SB7t7GjwRF
+++ mktemp
++ local LAST_ERR=/tmp/tmp.JvpYw9u5y6
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.SB7t7GjwRF
++ cat /tmp/tmp.JvpYw9u5y6
++ rm /tmp/tmp.SB7t7GjwRF /tmp/tmp.JvpYw9u5y6
++ return 0
+ [[ error == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 2 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.IaSijnTJsM
+++ mktemp
++ local LAST_ERR=/tmp/tmp.qdUD3E8SB9
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.IaSijnTJsM
++ cat /tmp/tmp.qdUD3E8SB9
++ rm /tmp/tmp.IaSijnTJsM /tmp/tmp.qdUD3E8SB9
++ return 0
+ [[ error == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 3 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.J4WoUGjGIE
+++ mktemp
++ local LAST_ERR=/tmp/tmp.ACMlFqv5gz
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.J4WoUGjGIE
++ cat /tmp/tmp.ACMlFqv5gz
++ rm /tmp/tmp.J4WoUGjGIE /tmp/tmp.ACMlFqv5gz
++ return 0
+ [[ error == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 4 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.Uhr0h0qZWO
+++ mktemp
++ local LAST_ERR=/tmp/tmp.c5y76SQu7Q
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.Uhr0h0qZWO
++ cat /tmp/tmp.c5y76SQu7Q
++ rm /tmp/tmp.Uhr0h0qZWO /tmp/tmp.c5y76SQu7Q
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 5 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.zlcYKeMT6W
+++ mktemp
++ local LAST_ERR=/tmp/tmp.qeBHdnrhfA
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.zlcYKeMT6W
++ cat /tmp/tmp.qeBHdnrhfA
++ rm /tmp/tmp.zlcYKeMT6W /tmp/tmp.qeBHdnrhfA
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 6 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.wxmkAF1698
+++ mktemp
++ local LAST_ERR=/tmp/tmp.QT1hWR5Mzw
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.wxmkAF1698
++ cat /tmp/tmp.QT1hWR5Mzw
++ rm /tmp/tmp.wxmkAF1698 /tmp/tmp.QT1hWR5Mzw
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 7 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.tq7c6kQEAX
+++ mktemp
++ local LAST_ERR=/tmp/tmp.lYjLd8CVwO
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.tq7c6kQEAX
++ cat /tmp/tmp.lYjLd8CVwO
++ rm /tmp/tmp.tq7c6kQEAX /tmp/tmp.lYjLd8CVwO
++ return 0
+ [[ ready == \r\e\a\d\y ]]
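wait_cluster_consistency polls the psmdb custom resource's .status.state every 10 seconds (after an initial 7-second pause), counting attempts against a 32-retry ceiling; the trace shows the cluster moving through initializing and error before settling on ready. A compact sketch of that loop, reconstructed from the trace rather than taken verbatim from the suite:

wait_cluster_consistency() {
	local cluster_name="$1"
	local wait_time=32
	local retry=0
	sleep 7
	echo -n 'waiting for cluster readyness'
	until [[ $(kubectl_bin get psmdb "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]]; do
		let retry+=1
		if [ "$retry" -ge "$wait_time" ]; then
			# roughly wait_time * 10 seconds elapsed without reaching ready
			echo "cluster $cluster_name did not reach ready state in time"
			exit 1
		fi
		echo -n .
		sleep 10
	done
}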
+ check_data
+ local postfix=
++ seq 0 2
+ for i in '$(seq 0 2)'
+ compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-18981 '' .svc.cluster.local myApp test
+ local command=find
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981
+ local postfix=
+ local suffix=.svc.cluster.local
+ local database=myApp
+ local collection=test
+ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:'
+ run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-18981 mongodb .svc.cluster.local
+ local 'command=use myApp\n db.test.find()'
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981
+ local driver=mongodb
+ local suffix=.svc.cluster.local
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
+ /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ local LAST_OUT=/tmp/tmp.d4svxBwOUF
+++ mktemp
++ local LAST_ERR=/tmp/tmp.2nUCB5uDKX
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.d4svxBwOUF
++ cat /tmp/tmp.2nUCB5uDKX
++ rm /tmp/tmp.d4svxBwOUF /tmp/tmp.2nUCB5uDKX
++ return 0
+ local client_container=psmdb-client-7469665986-w5rqr
+ local mongo_flag=
+ kubectl_bin exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin '
++ mktemp
+ local LAST_OUT=/tmp/tmp.OLG33eZQmL
++ mktemp
+ local LAST_ERR=/tmp/tmp.QzJppV80if
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 0 ']'
+ break
+ cat /tmp/tmp.OLG33eZQmL
+ cat /tmp/tmp.QzJppV80if
+ rm /tmp/tmp.OLG33eZQmL /tmp/tmp.QzJppV80if
+ return 0
+ diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1559/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.nszFGcQl8i/find
+ for i in '$(seq 0 2)'
+ compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-18981 1 .svc.cluster.local myApp1 test
+ local command=find
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981
+ local postfix=1
+ local suffix=.svc.cluster.local
+ local database=myApp1
+ local collection=test
+ run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-18981 mongodb .svc.cluster.local
+ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:'
+ local 'command=use myApp1\n db.test.find()'
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.Ypu4npY2jX
+++ mktemp
++ local LAST_ERR=/tmp/tmp.DNDMFNqecO
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.Ypu4npY2jX
++ cat /tmp/tmp.DNDMFNqecO
++ rm /tmp/tmp.Ypu4npY2jX /tmp/tmp.DNDMFNqecO
++ return 0
+ local client_container=psmdb-client-7469665986-w5rqr
+ local mongo_flag=
+ kubectl_bin exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin '
++ mktemp
+ local LAST_OUT=/tmp/tmp.uSwycCBLEJ
++ mktemp
+ local LAST_ERR=/tmp/tmp.Ifz5P3hL2u
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 0 ']'
+ break
+ cat /tmp/tmp.uSwycCBLEJ
+ cat /tmp/tmp.Ifz5P3hL2u
+ rm /tmp/tmp.uSwycCBLEJ /tmp/tmp.Ifz5P3hL2u
+ return 0
+ diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1559/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.nszFGcQl8i/find1
+ for i in '$(seq 0 2)'
+ compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-18981 2 .svc.cluster.local myApp2 test
+ local command=find
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981
+ local postfix=2
+ local suffix=.svc.cluster.local
+ local database=myApp2
+ local collection=test
+ run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-18981 mongodb .svc.cluster.local
+ local 'command=use myApp2\n db.test.find()'
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-18981
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
+ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.BI6m3HNwl7
+++ mktemp
++ local LAST_ERR=/tmp/tmp.JGndSDxZ7A
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.BI6m3HNwl7
++ cat /tmp/tmp.JGndSDxZ7A
++ rm /tmp/tmp.BI6m3HNwl7 /tmp/tmp.JGndSDxZ7A
++ return 0
+ local client_container=psmdb-client-7469665986-w5rqr
+ local mongo_flag=
+ kubectl_bin exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin '
++ mktemp
+ local LAST_OUT=/tmp/tmp.cgRDpP4iYs
++ mktemp
+ local LAST_ERR=/tmp/tmp.4W2nB4m7UB
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl exec psmdb-client-7469665986-w5rqr -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-18981.svc.cluster.local/admin '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 0 ']'
+ break
+ cat /tmp/tmp.cgRDpP4iYs
+ cat /tmp/tmp.4W2nB4m7UB
+ rm /tmp/tmp.cgRDpP4iYs /tmp/tmp.4W2nB4m7UB
+ return 0
+ diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1559/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.nszFGcQl8i/find2
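check_data re-runs the same three per-database comparisons that were made before the backup, so the restore is considered successful only if the find output for myApp, myApp1 and myApp2 still matches the stored fixtures. Roughly, with the mongos URI hard-coded here only for illustration:

check_data() {
	local postfix i
	for i in $(seq 0 2); do
		postfix=${i#0}   # 0 -> "" so the first database and fixture carry no numeric suffix
		compare_mongos_cmd find \
			"myApp:myPass@some-name-mongos.demand-backup-sharded-18981" \
			"$postfix" .svc.cluster.local "myApp$postfix" test
	done
}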
+ desc 'delete backup and check if it is removed from bucket -- minio'
+ set +o xtrace
-----------------------------------------------------------------------------------
delete backup and check if it is removed from bucket -- minio
-----------------------------------------------------------------------------------
perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted
perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted
perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs" deleted
perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted
If you don't see a command prompt, try pressing enter.
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-sharded-18981
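Deleting the psmdb-backup objects is only half of this step: the test then runs a short-lived aws-cli pod against the in-cluster MinIO endpoint to confirm the corresponding objects are gone from the bucket, which is why the harmless "couldn't attach to pod/aws-cli" warning appears while kubectl falls back to streaming its logs. A sketch of that check; the image, credentials, bucket name and endpoint below are assumptions, not values taken from this log:

# remove all on-demand backups; the operator's finalizer should delete the remote files too
kubectl_bin delete psmdb-backup --all

# list what is left in the minio bucket from a throwaway aws-cli pod
if kubectl_bin run -i --rm aws-cli --restart=Never --image=perconalab/awscli \
	--env=AWS_ACCESS_KEY_ID=some-access-key --env=AWS_SECRET_ACCESS_KEY=some-secret-key \
	-- aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ \
	| grep myApp.test.gz; then
	echo 'backup objects are still present in the bucket'
	exit 1
fi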
-----------------------------------------------------------------------------------
check for passwords leak
-----------------------------------------------------------------------------------
secrets=YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2
passwords=backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2
pods=minio-service-57dd49b-hgf4t psmdb-client-7469665986-w5rqr some-name-cfg-0 some-name-cfg-1 some-name-cfg-2 some-name-mongos-0 some-name-mongos-1 some-name-mongos-2 some-name-rs0-0 some-name-rs0-1 some-name-rs0-2 some-name-rs1-0 some-name-rs1-1 some-name-rs1-2 some-name-rs2-0 some-name-rs2-1 some-name-rs2-2
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-minio-service-57dd49b-hgf4t-minio.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-psmdb-client-7469665986-w5rqr-psmdb-client.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-cfg-0-mongod.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-cfg-0-cfg-sidecar-1.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-cfg-0-backup-agent.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-cfg-1-mongod.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-cfg-1-cfg-sidecar-1.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-cfg-1-backup-agent.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-cfg-2-mongod.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-cfg-2-cfg-sidecar-1.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-cfg-2-backup-agent.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-mongos-0-mongos.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-mongos-0-mongos-sidecar-1.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-mongos-1-mongos.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-mongos-1-mongos-sidecar-1.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-mongos-2-mongos.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-mongos-2-mongos-sidecar-1.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs0-0-mongod.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs0-0-backup-agent.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs0-1-mongod.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs0-1-backup-agent.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs0-2-mongod.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs0-2-backup-agent.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs1-0-mongod.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs1-0-backup-agent.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs1-1-mongod.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs1-1-backup-agent.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs1-2-mongod.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs1-2-backup-agent.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs2-0-mongod.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs2-0-rs-sidecar-1.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs2-0-backup-agent.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs2-1-mongod.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs2-1-rs-sidecar-1.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs2-1-backup-agent.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs2-2-mongod.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs2-2-rs-sidecar-1.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-some-name-rs2-2-backup-agent.txt
logs saved in: /tmp/tmp.nszFGcQl8i/logs_output-percona-server-mongodb-operator-54f884cbc-28rjc-percona-server-mongodb-operator.txt
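The password-leak check collects every password-like field from the namespace's Secrets (the base64 strings echoed above, plus their decoded values), saves the logs of every container of every pod to the temporary directory, and greps those files for any of the credentials. A condensed sketch of the idea; the helper name, the jq key filter and the tmp_dir variable are assumptions:

check_passwords_leak() {
	local secrets pods p c pass
	# every base64 value whose key looks like a password, across all Secrets in the namespace
	secrets=$(kubectl_bin get secrets -o json | jq -r '.items[].data // {} | to_entries[] | select(.key | test("PASSWORD|password")) | .value')
	pods=$(kubectl_bin get pods -o name | awk -F/ '{print $2}')
	for p in $pods; do
		for c in $(kubectl_bin get pod "$p" -o jsonpath='{.spec.containers[*].name}'); do
			kubectl_bin logs "$p" -c "$c" >"$tmp_dir/logs_output-$p-$c.txt"
			echo "logs saved in: $tmp_dir/logs_output-$p-$c.txt"
			for pass in $secrets; do
				# check both the decoded and the raw base64 form of each credential
				if grep -qe "$(echo "$pass" | base64 -d)" -e "$pass" "$tmp_dir/logs_output-$p-$c.txt"; then
					echo "password leaked in $p/$c logs"
					exit 1
				fi
			done
		done
	done
}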
runtimeclass.node.k8s.io "container-rc" deleted
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
get and delete old CRDs and RBAC
-----------------------------------------------------------------------------------
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbbackups"
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbrestores"
error: the server doesn't have a resource type "perconaservermongodbs"
+ kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbs"
clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted
clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted
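The teardown mirrors the setup at the top of the log: the CRDs and cluster-scoped RBAC are removed, and any leftover custom resources first get their finalizers cleared so deletion cannot hang. The "server doesn't have a resource type" errors above are expected, since the CRDs are already gone by the time the patch runs. A sketch of that finalizer-clearing pass, written as a plain reconstruction rather than the suite's exact loop:

for crd in perconaservermongodbbackups.psmdb.percona.com \
	perconaservermongodbrestores.psmdb.percona.com \
	perconaservermongodbs.psmdb.percona.com; do
	# clear finalizers on any leftover objects so deletion cannot hang, then drop the CRD
	kubectl get "$crd" --all-namespaces --no-headers 2>/dev/null \
		| while read -r ns name _; do
			kubectl patch "$crd" "$name" -n "$ns" --type=merge -p '{"metadata":{"finalizers":[]}}' || :
		done
	kubectl delete crd "$crd" --ignore-not-found --wait=false
done
kubectl delete clusterrole percona-server-mongodb-operator --ignore-not-found
kubectl delete clusterrolebinding service-account-percona-server-mongodb-operator --ignore-not-found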