++ echo 'Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1540/e2e-tests/logs/demand-backup-sharded.log' Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1540/e2e-tests/logs/demand-backup-sharded.log ++ '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1540/e2e-tests/conf/cloud-secret.yml ']' ++ SKIP_BACKUPS_TO_AWS_GCP_AZURE= ++ oc get projects ++ grep '^minikube' ++ kubectl get nodes +++ kubectl version -o json +++ jq -r .serverVersion.gitVersion +++ grep '\-eks\-' WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 ++ '[' ']' ++ EKS=0 +++ kubectl version -o json +++ jq -r .serverVersion.gitVersion +++ grep gke WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 ++ '[' v1.26.15-gke.1191000 ']' ++ GKE=1 +++ jq -r '.serverVersion.major + "." + .serverVersion.minor' +++ kubectl version -o json +++ /usr/bin/sed -r 's/[^0-9.]+//g' WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 ++ KUBE_VERSION=1.26 + set_debug + [[ 0 == 1 ]] + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbrestores" error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- error: resource(s) were provided, but no name was specified 
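The trace above is the harness detecting the target platform before it cleans up old CRDs and RBAC. A minimal sketch of that detection logic, reconstructed from the traced commands (only the kubectl/jq/sed pipeline is shown; the surrounding helper functions from the e2e suite are not):

# Classify the cluster (EKS/GKE) and extract the server minor version, as traced above.
git_version=$(kubectl version -o json | jq -r .serverVersion.gitVersion)
echo "$git_version" | grep -q '\-eks\-' && EKS=1 || EKS=0
echo "$git_version" | grep -q gke && GKE=1 || GKE=0
KUBE_VERSION=$(kubectl version -o json \
    | jq -r '.serverVersion.major + "." + .serverVersion.minor' \
    | sed -r 's/[^0-9.]+//g')   # yields "1.26" for this run
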
----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- namespace/psmdb-operator created Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1540-1d9c9379-4-cluster2" modified. ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created deployment.apps/percona-server-mongodb-operator created waiting for pod/percona-server-mongodb-operator-86bb75c784-9gx9f to be ready.OK ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- cleaned up old namespaces demand-backup-sharded-14208 ----------------------------------------------------------------------------------- error: resource(s) were provided, but no name was specified ----------------------------------------------------------------------------------- create namespace demand-backup-sharded-14208 ----------------------------------------------------------------------------------- namespace/demand-backup-sharded-14208 created Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1540-1d9c9379-4-cluster2" modified. ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- Error: uninstall: Release not loaded: minio-service: release: not found "minio" has been removed from your repositories "minio" has been added to your repositories NAME: minio-service LAST DEPLOYED: Sat May 4 12:11:31 2024 NAMESPACE: demand-backup-sharded-14208 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.demand-backup-sharded-14208.svc.cluster.local To access MinIO from localhost, run the below commands: 1. export POD_NAME=$(kubectl get pods --namespace demand-backup-sharded-14208 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. 
kubectl port-forward $POD_NAME 9000 --namespace demand-backup-sharded-14208 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace demand-backup-sharded-14208 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace demand-backup-sharded-14208 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. mc ls minio-service-local waiting for pod/minio-service-57dd49b-9hcrv to be ready.OK service/minio-service created make_bucket: operator-testing pod "aws-cli" deleted If you don't see a command prompt, try pressing enter. warning: couldn't attach to pod/aws-cli, falling back to streaming logs: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-sharded-14208 ----------------------------------------------------------------------------------- create first PSMDB cluster ----------------------------------------------------------------------------------- secret/some-users created deployment.apps/psmdb-client created ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created runtimeclass.node.k8s.io/container-rc unchanged perconaservermongodb.psmdb.percona.com/some-name created ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- waiting for pod/some-name-rs0-0 to be ready.........OK waiting for pod/some-name-rs0-1 to be ready........OK waiting for pod/some-name-rs0-2 to be ready.......OK Waiting for cluster readyness........................................... waiting for pod/some-name-cfg-0 to be ready.OK waiting for pod/some-name-cfg-1 to be ready.OK waiting for pod/some-name-cfg-2 to be ready.OK waiting for pod/some-name-mongos-0 to be ready.OK waiting for pod/some-name-mongos-1 to be ready.OK waiting for pod/some-name-mongos-2 to be ready.OK Waiting for cluster readyness ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- secret/some-name-mongos created waiting for pod/some-name-mongos-0 to be ready.OK waiting for pod/some-name-mongos-1 to be ready.OK waiting for pod/some-name-mongos-2 to be ready.OK Waiting for cluster readyness.............................................................................................. 
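Each "waiting for pod/... to be ready.OK" line above comes from a polling helper in the e2e suite. A rough sketch of such a readiness wait, assuming a helper named wait_pod (the real helper's name, timeout handling, and error path may differ):

wait_pod() {
    local pod=$1
    echo -n "waiting for pod/$pod to be ready"
    # Poll the first container's ready flag until it turns true.
    until [[ $(kubectl get pod "$pod" -o 'jsonpath={.status.containerStatuses[0].ready}') == "true" ]]; do
        echo -n .
        sleep 1
    done
    echo .OK
}
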
----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("db8a1298-a510-4109-a19c-33e2e5fdb5da") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" }, { "db" : "myApp1", "role" : "readWrite" }, { "db" : "myApp2", "role" : "readWrite" } ] } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("277ff03f-7e0b-4c64-a675-d553f1ea47e6") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1714825223, 9), "signature" : { "hash" : BinData(0,"R8XfYC2CAFTqX7TIMdTV2sK7dFU="), "keyId" : NumberLong("7365116515974119447") } }, "operationTime" : Timestamp(1714825223, 2) } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("612596ed-b1aa-4441-abe2-4008787dec1f") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1714825226, 9), "signature" : { "hash" : BinData(0,"pWioBbJL+VhDjFPDDskQStIwUds="), "keyId" : NumberLong("7365116515974119447") } }, "operationTime" : Timestamp(1714825226, 4) } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("0fbefeff-debe-417c-92d7-26e0456ec502") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1714825230, 11), "signature" : { "hash" : BinData(0,"HwvU51B2vu2APp6utitcB9w2V3M="), "keyId" : NumberLong("7365116515974119447") } }, "operationTime" : Timestamp(1714825230, 6) } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("2f472cfa-c204-4948-af45-928c4f55e62d") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("2da22340-eff3-4035-a7ea-bcb1cefff5ec") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version 
v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("4ae24550-33fa-4bcf-ad44-c318a793cb1f") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye some-name-rs0-0 some-name-rs0-1 some-name-rs0-2 some-name-rs1-0 some-name-rs1-1 some-name-rs1-2 some-name-rs2-0 some-name-rs2-1 some-name-rs2-2 ----------------------------------------------------------------------------------- run backups ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- run backup backup-minio ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-minio created ----------------------------------------------------------------------------------- run backup backup-aws-s3 ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created ----------------------------------------------------------------------------------- run backup backup-gcp-cs ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs created ----------------------------------------------------------------------------------- run backup backup-azure-blob ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created backup-aws-s3................................... backup-gcp-cs.................... backup-azure-blob................. backup-minio. 
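The four "run backup ..." steps above each create a PerconaServerMongoDBBackup object and then wait for it to reach the ready state (the trailing dots after each backup name). A hedged sketch of what one such manifest typically looks like; the apiVersion and field names follow common PSMDB operator examples and are assumptions, not copied from this run:

cat <<EOF | kubectl apply -f -
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBBackup
metadata:
  name: backup-minio
spec:
  clusterName: some-name   # assumption: the cluster created earlier in this test
  storageName: minio       # assumption: a storage defined in the cluster spec
EOF
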
----------------------------------------------------------------------------------- check backup and restore -- aws-s3 ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("b43df609-aa92-4c24-9ae7-c53d282e8b1e") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("f71a92f2-dd37-4fec-9438-ebb0d20700c7") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("b5960e9e-962d-4abf-a27a-3f94ca20b92d") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-aws-s3 created waiting psmdb-restore/backup-aws-s3 to reach ready state............. + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bDeHKz8MGM +++ mktemp ++ local LAST_ERR=/tmp/tmp.Tf2HZICzlo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.bDeHKz8MGM ++ cat /tmp/tmp.Tf2HZICzlo ++ rm /tmp/tmp.bDeHKz8MGM /tmp/tmp.Tf2HZICzlo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DqVZpBSLNq +++ mktemp ++ local LAST_ERR=/tmp/tmp.sPv9Bo5H9Z ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.DqVZpBSLNq ++ cat /tmp/tmp.sPv9Bo5H9Z ++ rm /tmp/tmp.DqVZpBSLNq /tmp/tmp.sPv9Bo5H9Z ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xoIAd0AeQu +++ mktemp ++ local LAST_ERR=/tmp/tmp.eXHq4PAXL8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.xoIAd0AeQu ++ cat /tmp/tmp.eXHq4PAXL8 ++ rm /tmp/tmp.xoIAd0AeQu /tmp/tmp.eXHq4PAXL8 ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jbTZLvftMe +++ mktemp ++ local LAST_ERR=/tmp/tmp.dAjzXFvpb7 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.jbTZLvftMe ++ cat /tmp/tmp.dAjzXFvpb7 ++ rm /tmp/tmp.jbTZLvftMe /tmp/tmp.dAjzXFvpb7 ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WpH6DDVnj8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.1kCEad5Gvo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.WpH6DDVnj8 ++ cat /tmp/tmp.1kCEad5Gvo ++ rm /tmp/tmp.WpH6DDVnj8 /tmp/tmp.1kCEad5Gvo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wvW6F7ArSp +++ mktemp ++ local LAST_ERR=/tmp/tmp.dVQbIb0Wmo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.wvW6F7ArSp ++ cat /tmp/tmp.dVQbIb0Wmo ++ rm /tmp/tmp.wvW6F7ArSp /tmp/tmp.dVQbIb0Wmo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IHOgqpPDpi +++ mktemp ++ local LAST_ERR=/tmp/tmp.vSdzLmUUr5 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.IHOgqpPDpi ++ cat /tmp/tmp.vSdzLmUUr5 ++ rm /tmp/tmp.IHOgqpPDpi /tmp/tmp.vSdzLmUUr5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uktKu7mztw +++ mktemp ++ local LAST_ERR=/tmp/tmp.SctuNCT2Pj ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.uktKu7mztw ++ cat /tmp/tmp.SctuNCT2Pj ++ rm /tmp/tmp.uktKu7mztw /tmp/tmp.SctuNCT2Pj ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-14208 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-14208 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7w2EZmCdfS +++ mktemp ++ local LAST_ERR=/tmp/tmp.Pb2vyTtTjC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.7w2EZmCdfS ++ cat /tmp/tmp.Pb2vyTtTjC ++ rm /tmp/tmp.7w2EZmCdfS /tmp/tmp.Pb2vyTtTjC ++ return 0 + local client_container=psmdb-client-5dc94d5b48-p4m54 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.c5RR04knW5 ++ mktemp + local LAST_ERR=/tmp/tmp.CxAn9ceiYl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.c5RR04knW5 + cat /tmp/tmp.CxAn9ceiYl + rm /tmp/tmp.c5RR04knW5 /tmp/tmp.CxAn9ceiYl + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1540/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.1h4gczWLFO/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-14208 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-14208 mongodb .svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to 
reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local 'command=use myApp1\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uK0ah0Xr6H +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_ERR=/tmp/tmp.QvUtgdPBzM ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.uK0ah0Xr6H ++ cat /tmp/tmp.QvUtgdPBzM ++ rm /tmp/tmp.uK0ah0Xr6H /tmp/tmp.QvUtgdPBzM ++ return 0 + local client_container=psmdb-client-5dc94d5b48-p4m54 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.57k8gsOkxp ++ mktemp + local LAST_ERR=/tmp/tmp.k1IesBWKfL + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.57k8gsOkxp + cat /tmp/tmp.k1IesBWKfL + rm /tmp/tmp.57k8gsOkxp /tmp/tmp.k1IesBWKfL + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1540/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.1h4gczWLFO/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-14208 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-14208 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C9AIQjxN4C +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZuEeKRia1I ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.C9AIQjxN4C ++ cat /tmp/tmp.ZuEeKRia1I ++ rm /tmp/tmp.C9AIQjxN4C /tmp/tmp.ZuEeKRia1I ++ return 0 + local client_container=psmdb-client-5dc94d5b48-p4m54 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.UJKU4Kl2Pm ++ mktemp + local LAST_ERR=/tmp/tmp.WBhpzA2RMR + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.UJKU4Kl2Pm + cat /tmp/tmp.WBhpzA2RMR + rm /tmp/tmp.UJKU4Kl2Pm /tmp/tmp.WBhpzA2RMR + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1540/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.1h4gczWLFO/find2 + desc 'check backup and restore -- gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- gcp-cs ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("5a06b697-eeb4-4969-b1d5-5cb6e15294ac") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("6bd69b2c-2606-4a59-ab6d-6e11c0c94483") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("0a553aa1-2f0b-441f-ad6d-c3e8a94b17da") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-gcp-cs created waiting psmdb-restore/backup-gcp-cs to reach ready state............ + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TJ3Crpgnwy +++ mktemp ++ local LAST_ERR=/tmp/tmp.sB6xDPVX22 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.TJ3Crpgnwy ++ cat /tmp/tmp.sB6xDPVX22 ++ rm /tmp/tmp.TJ3Crpgnwy /tmp/tmp.sB6xDPVX22 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.idE2kftjNC +++ mktemp ++ local LAST_ERR=/tmp/tmp.WZC9rQYRDy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.idE2kftjNC ++ cat /tmp/tmp.WZC9rQYRDy ++ rm /tmp/tmp.idE2kftjNC /tmp/tmp.WZC9rQYRDy ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YHN7CSx1GM +++ mktemp ++ local LAST_ERR=/tmp/tmp.PNSdhnYQYv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.YHN7CSx1GM ++ cat /tmp/tmp.PNSdhnYQYv ++ rm /tmp/tmp.YHN7CSx1GM /tmp/tmp.PNSdhnYQYv ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f3cIXObSvg +++ mktemp ++ local LAST_ERR=/tmp/tmp.HDjp89irMk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.f3cIXObSvg ++ cat /tmp/tmp.HDjp89irMk ++ rm /tmp/tmp.f3cIXObSvg /tmp/tmp.HDjp89irMk ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SkRTlw63kZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.StebBHQS0K ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.SkRTlw63kZ ++ cat /tmp/tmp.StebBHQS0K ++ rm /tmp/tmp.SkRTlw63kZ /tmp/tmp.StebBHQS0K ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aWYnxMonCf +++ mktemp ++ local LAST_ERR=/tmp/tmp.RlOXJZp37h ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.aWYnxMonCf ++ cat /tmp/tmp.RlOXJZp37h ++ rm /tmp/tmp.aWYnxMonCf /tmp/tmp.RlOXJZp37h ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cuxN6bEF0L +++ mktemp ++ local LAST_ERR=/tmp/tmp.EId8JaQrjU ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.cuxN6bEF0L ++ cat /tmp/tmp.EId8JaQrjU ++ rm /tmp/tmp.cuxN6bEF0L /tmp/tmp.EId8JaQrjU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2l8yMB7fCf +++ mktemp ++ local LAST_ERR=/tmp/tmp.sd8yV3wZnb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.2l8yMB7fCf ++ cat /tmp/tmp.sd8yV3wZnb ++ rm /tmp/tmp.2l8yMB7fCf /tmp/tmp.sd8yV3wZnb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ioxYy1vjj5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tyHomzagRF ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.ioxYy1vjj5 ++ cat /tmp/tmp.tyHomzagRF ++ rm /tmp/tmp.ioxYy1vjj5 /tmp/tmp.tyHomzagRF ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-14208 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-14208 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local driver=mongodb + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tsLLyjzFtd +++ mktemp ++ local LAST_ERR=/tmp/tmp.DOhDwHzj6U ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.tsLLyjzFtd ++ cat /tmp/tmp.DOhDwHzj6U ++ rm /tmp/tmp.tsLLyjzFtd /tmp/tmp.DOhDwHzj6U ++ return 0 + local client_container=psmdb-client-5dc94d5b48-p4m54 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.OcqjikFaPw ++ mktemp + local LAST_ERR=/tmp/tmp.j19hAgwPYC + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.OcqjikFaPw + cat /tmp/tmp.j19hAgwPYC + rm /tmp/tmp.OcqjikFaPw /tmp/tmp.j19hAgwPYC + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1540/e2e-tests/demand-backup-sharded/compare/find.json 
/tmp/tmp.1h4gczWLFO/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-14208 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-14208 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XhTW2XOP07 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ieB66nMVBe ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.XhTW2XOP07 ++ cat /tmp/tmp.ieB66nMVBe ++ rm /tmp/tmp.XhTW2XOP07 /tmp/tmp.ieB66nMVBe ++ return 0 + local client_container=psmdb-client-5dc94d5b48-p4m54 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.oE4omIwSa2 ++ mktemp + local LAST_ERR=/tmp/tmp.jJexGsL3MK + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.oE4omIwSa2 + cat /tmp/tmp.jJexGsL3MK + rm /tmp/tmp.oE4omIwSa2 /tmp/tmp.jJexGsL3MK + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1540/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.1h4gczWLFO/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-14208 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-14208 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1vSROegZkI + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ mktemp ++ local LAST_ERR=/tmp/tmp.oJyWfiaAna ++ local 
exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.1vSROegZkI ++ cat /tmp/tmp.oJyWfiaAna ++ rm /tmp/tmp.1vSROegZkI /tmp/tmp.oJyWfiaAna ++ return 0 + local client_container=psmdb-client-5dc94d5b48-p4m54 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.YrY76UdTu1 ++ mktemp + local LAST_ERR=/tmp/tmp.T1aLpJXxyP + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.YrY76UdTu1 + cat /tmp/tmp.T1aLpJXxyP + rm /tmp/tmp.YrY76UdTu1 /tmp/tmp.T1aLpJXxyP + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1540/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.1h4gczWLFO/find2 + desc 'check backup and restore -- azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- azure-blob ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("7c039b3a-bded-42ee-9951-eabc60e4301c") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("5a4e0d78-cf28-4f9e-a8f0-b557ea111f46") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("4d6e1acf-a543-4318-af07-8161934390c1") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-azure-blob created waiting psmdb-restore/backup-azure-blob to reach ready state........... 
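Each restore above (aws-s3, gcp-cs, and here azure-blob) is started by creating a PerconaServerMongoDBRestore object that points at the finished backup; the harness then waits for the restore to reach ready and re-runs the same data checks. A hedged sketch of one such restore manifest; the field names are assumptions based on typical PSMDB restore objects, not taken from this log:

cat <<EOF | kubectl apply -f -
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBRestore
metadata:
  name: restore-backup-azure-blob
spec:
  clusterName: some-name          # assumption: the cluster under test
  backupName: backup-azure-blob   # assumption: the PerconaServerMongoDBBackup created above
EOF
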
+ '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eFcuzZTKyg +++ mktemp ++ local LAST_ERR=/tmp/tmp.ehwBmOCPI8 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.eFcuzZTKyg ++ cat /tmp/tmp.ehwBmOCPI8 ++ rm /tmp/tmp.eFcuzZTKyg /tmp/tmp.ehwBmOCPI8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P9HOFCNerN +++ mktemp ++ local LAST_ERR=/tmp/tmp.HaH42syXSm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.P9HOFCNerN ++ cat /tmp/tmp.HaH42syXSm ++ rm /tmp/tmp.P9HOFCNerN /tmp/tmp.HaH42syXSm ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oFoVU27ILE +++ mktemp ++ local LAST_ERR=/tmp/tmp.I5WtLt518y ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.oFoVU27ILE ++ cat /tmp/tmp.I5WtLt518y ++ rm /tmp/tmp.oFoVU27ILE /tmp/tmp.I5WtLt518y ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zkHs0XBwTn +++ mktemp ++ local LAST_ERR=/tmp/tmp.CAmVQESiRm ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.zkHs0XBwTn ++ cat /tmp/tmp.CAmVQESiRm ++ rm /tmp/tmp.zkHs0XBwTn /tmp/tmp.CAmVQESiRm ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YKuprPzjhx +++ mktemp ++ local LAST_ERR=/tmp/tmp.NbdYYwIy5u ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.YKuprPzjhx ++ cat /tmp/tmp.NbdYYwIy5u ++ rm /tmp/tmp.YKuprPzjhx /tmp/tmp.NbdYYwIy5u ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aGDHTbZHE9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.fyxR4FvAv3 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.aGDHTbZHE9 ++ cat /tmp/tmp.fyxR4FvAv3 ++ rm /tmp/tmp.aGDHTbZHE9 /tmp/tmp.fyxR4FvAv3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ob3Nt3nsSZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.yNjBzPrEAL ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.ob3Nt3nsSZ ++ cat /tmp/tmp.yNjBzPrEAL ++ rm /tmp/tmp.ob3Nt3nsSZ /tmp/tmp.yNjBzPrEAL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8xFtGdb0FD +++ mktemp ++ local LAST_ERR=/tmp/tmp.FdiFdUro2F ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.8xFtGdb0FD ++ cat /tmp/tmp.FdiFdUro2F ++ rm /tmp/tmp.8xFtGdb0FD /tmp/tmp.FdiFdUro2F ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-14208 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-14208 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ local LAST_OUT=/tmp/tmp.cuJanDH2mZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.A7ixyS2U7B ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.cuJanDH2mZ ++ cat /tmp/tmp.A7ixyS2U7B ++ rm /tmp/tmp.cuJanDH2mZ /tmp/tmp.A7ixyS2U7B ++ return 0 + local client_container=psmdb-client-5dc94d5b48-p4m54 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.SR6TepaWBL ++ mktemp + local 
LAST_ERR=/tmp/tmp.sevHsBFsoT + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.SR6TepaWBL + cat /tmp/tmp.sevHsBFsoT + rm /tmp/tmp.SR6TepaWBL /tmp/tmp.sevHsBFsoT + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1540/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.1h4gczWLFO/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-14208 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-14208 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MDfYK3xVtS +++ mktemp ++ local LAST_ERR=/tmp/tmp.ORShmyCCir ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.MDfYK3xVtS ++ cat /tmp/tmp.ORShmyCCir ++ rm /tmp/tmp.MDfYK3xVtS /tmp/tmp.ORShmyCCir ++ return 0 + local client_container=psmdb-client-5dc94d5b48-p4m54 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.Yl6YiW8SKD ++ mktemp + local LAST_ERR=/tmp/tmp.MXfr8bnJoQ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.Yl6YiW8SKD + cat /tmp/tmp.MXfr8bnJoQ + rm /tmp/tmp.Yl6YiW8SKD /tmp/tmp.MXfr8bnJoQ + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1540/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.1h4gczWLFO/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-14208 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-14208 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local 
uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WibTm5WZ8i +++ mktemp ++ local LAST_ERR=/tmp/tmp.gCA632rdRv ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.WibTm5WZ8i ++ cat /tmp/tmp.gCA632rdRv ++ rm /tmp/tmp.WibTm5WZ8i /tmp/tmp.gCA632rdRv ++ return 0 + local client_container=psmdb-client-5dc94d5b48-p4m54 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.dxhMbAFWLa ++ mktemp + local LAST_ERR=/tmp/tmp.LpotHSnsHp + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.dxhMbAFWLa + cat /tmp/tmp.LpotHSnsHp + rm /tmp/tmp.dxhMbAFWLa /tmp/tmp.LpotHSnsHp + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1540/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.1h4gczWLFO/find2 + desc 'check backup and restore -- minio' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- minio ----------------------------------------------------------------------------------- If you don't see a command prompt, try pressing enter. 
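For the minio storage the check that follows lists the uploaded dump directly in the operator-testing bucket through a short-lived aws-cli pod (the "myApp.test.gz" line below). A hedged sketch of that kind of listing; the image, credential placeholders, and endpoint are illustrative assumptions, not values from this run:

kubectl run -i --rm aws-cli --restart=Never --image=amazon/aws-cli \
  --env AWS_ACCESS_KEY_ID=<minio-root-user> \
  --env AWS_SECRET_ACCESS_KEY=<minio-root-password> -- \
  --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ --recursive
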
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-sharded-14208 2024-05-04 12:21:26 55 myApp.test.gz Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("8e83e079-113a-4ddb-bec2-48f59c308c6f") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("39f1b83e-aee7-4e0b-b700-3044dda004b7") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-14208.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("1fc3c478-35d7-4899-a31a-daece6b84a49") } Percona Server for MongoDB server version: v7.0.8-5 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio created waiting psmdb-restore/backup-minio to reach ready state............ + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z39DzsTtDd +++ mktemp ++ local LAST_ERR=/tmp/tmp.eqLZZdU5z9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.Z39DzsTtDd ++ cat /tmp/tmp.eqLZZdU5z9 ++ rm /tmp/tmp.Z39DzsTtDd /tmp/tmp.eqLZZdU5z9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KU7cs9A0h1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.HKSAhYbtMN ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.KU7cs9A0h1 ++ cat /tmp/tmp.HKSAhYbtMN ++ rm /tmp/tmp.KU7cs9A0h1 /tmp/tmp.HKSAhYbtMN ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BhiyeSGvVo +++ mktemp ++ local LAST_ERR=/tmp/tmp.LXzM2j0B2V ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.BhiyeSGvVo ++ cat /tmp/tmp.LXzM2j0B2V ++ rm /tmp/tmp.BhiyeSGvVo /tmp/tmp.LXzM2j0B2V ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E2m5MuptjM +++ mktemp ++ local LAST_ERR=/tmp/tmp.JDVgSWuPb4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.E2m5MuptjM ++ cat /tmp/tmp.JDVgSWuPb4 ++ rm /tmp/tmp.E2m5MuptjM /tmp/tmp.JDVgSWuPb4 ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aIlr8fVTTH +++ mktemp ++ local LAST_ERR=/tmp/tmp.RG0VoFE3UC ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.aIlr8fVTTH ++ cat /tmp/tmp.RG0VoFE3UC ++ rm /tmp/tmp.aIlr8fVTTH /tmp/tmp.RG0VoFE3UC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.viX3ahHMfG +++ mktemp ++ local LAST_ERR=/tmp/tmp.hGrmVI7lxb ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.viX3ahHMfG ++ cat /tmp/tmp.hGrmVI7lxb ++ rm /tmp/tmp.viX3ahHMfG /tmp/tmp.hGrmVI7lxb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MYDppLgAPe +++ mktemp ++ local LAST_ERR=/tmp/tmp.4K7uJTUI9Q ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.MYDppLgAPe ++ cat /tmp/tmp.4K7uJTUI9Q ++ rm /tmp/tmp.MYDppLgAPe /tmp/tmp.4K7uJTUI9Q ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5Ag6zWDSed +++ mktemp ++ local LAST_ERR=/tmp/tmp.i1ZY58p4oy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.5Ag6zWDSed ++ cat /tmp/tmp.i1ZY58p4oy ++ rm /tmp/tmp.5Ag6zWDSed /tmp/tmp.i1ZY58p4oy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SVrdAb93Yp +++ mktemp ++ local LAST_ERR=/tmp/tmp.DbhAPvB4dc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.SVrdAb93Yp ++ cat /tmp/tmp.DbhAPvB4dc ++ rm /tmp/tmp.SVrdAb93Yp /tmp/tmp.DbhAPvB4dc ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-14208 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-14208 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BSp5m0sw7t + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' +++ mktemp ++ local LAST_ERR=/tmp/tmp.iGjHHCUkeo ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.BSp5m0sw7t ++ cat /tmp/tmp.iGjHHCUkeo ++ rm /tmp/tmp.BSp5m0sw7t /tmp/tmp.iGjHHCUkeo ++ return 0 + local client_container=psmdb-client-5dc94d5b48-p4m54 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.Z6n2fFeNWk ++ mktemp + local LAST_ERR=/tmp/tmp.USwKDowTV5 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.Z6n2fFeNWk + cat /tmp/tmp.USwKDowTV5 + rm /tmp/tmp.Z6n2fFeNWk /tmp/tmp.USwKDowTV5 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1540/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.1h4gczWLFO/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-14208 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-14208 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local 
uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Fb1AGSeryF +++ mktemp ++ local LAST_ERR=/tmp/tmp.jG13eUImM2 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.Fb1AGSeryF ++ cat /tmp/tmp.jG13eUImM2 ++ rm /tmp/tmp.Fb1AGSeryF /tmp/tmp.jG13eUImM2 ++ return 0 + local client_container=psmdb-client-5dc94d5b48-p4m54 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.6kYraSoVZU ++ mktemp + local LAST_ERR=/tmp/tmp.8Ubvp9TsR3 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.6kYraSoVZU + cat /tmp/tmp.8Ubvp9TsR3 + rm /tmp/tmp.6kYraSoVZU /tmp/tmp.8Ubvp9TsR3 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1540/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.1h4gczWLFO/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-14208 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-14208 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-14208 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Cy4du6cwCm +++ mktemp ++ local LAST_ERR=/tmp/tmp.FGKNO00pm9 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.Cy4du6cwCm ++ cat /tmp/tmp.FGKNO00pm9 ++ rm /tmp/tmp.Cy4du6cwCm /tmp/tmp.FGKNO00pm9 ++ return 0 + local client_container=psmdb-client-5dc94d5b48-p4m54 + local mongo_flag= + kubectl_bin exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.gfjuT01tTQ ++ mktemp + local LAST_ERR=/tmp/tmp.2wX1grGqTY + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-5dc94d5b48-p4m54 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-14208.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.gfjuT01tTQ + cat /tmp/tmp.2wX1grGqTY + rm /tmp/tmp.gfjuT01tTQ /tmp/tmp.2wX1grGqTY + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1540/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.1h4gczWLFO/find2 + desc 'delete backup and check if it is removed from bucket -- minio' + set +o xtrace ----------------------------------------------------------------------------------- delete backup and check if it is removed from bucket -- minio ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs" deleted perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted ----------------------------------------------------------------------------------- check for passwords leak ----------------------------------------------------------------------------------- secrets=YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 passwords=backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 pods=minio-service-57dd49b-9hcrv psmdb-client-5dc94d5b48-p4m54 some-name-cfg-0 some-name-cfg-1 some-name-cfg-2 some-name-mongos-0 some-name-mongos-1 some-name-mongos-2 some-name-rs0-0 some-name-rs0-1 some-name-rs0-2 some-name-rs1-0 some-name-rs1-1 some-name-rs1-2 some-name-rs2-0 some-name-rs2-1 some-name-rs2-2 logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-minio-service-57dd49b-9hcrv-minio.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-psmdb-client-5dc94d5b48-p4m54-psmdb-client.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-cfg-0-mongod.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-cfg-0-cfg-sidecar-1.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-cfg-0-backup-agent.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-cfg-1-mongod.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-cfg-1-cfg-sidecar-1.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-cfg-1-backup-agent.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-cfg-2-mongod.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-cfg-2-cfg-sidecar-1.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-cfg-2-backup-agent.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-mongos-0-mongos.txt logs saved in: 
/tmp/tmp.1h4gczWLFO/logs_output-some-name-mongos-0-mongos-sidecar-1.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-mongos-1-mongos.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-mongos-1-mongos-sidecar-1.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-mongos-2-mongos.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-mongos-2-mongos-sidecar-1.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs0-0-mongod.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs0-0-backup-agent.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs0-1-mongod.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs0-1-backup-agent.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs0-2-mongod.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs0-2-backup-agent.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs1-0-mongod.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs1-0-backup-agent.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs1-1-mongod.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs1-1-backup-agent.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs1-2-mongod.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs1-2-backup-agent.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs2-0-mongod.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs2-0-rs-sidecar-1.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs2-0-backup-agent.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs2-1-mongod.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs2-1-rs-sidecar-1.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs2-1-backup-agent.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs2-2-mongod.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs2-2-rs-sidecar-1.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-some-name-rs2-2-backup-agent.txt logs saved in: /tmp/tmp.1h4gczWLFO/logs_output-percona-server-mongodb-operator-86bb75c784-9gx9f-percona-server-mongodb-operator.txt runtimeclass.node.k8s.io "container-rc" deleted ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbbackups" error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' E0504 12:35:30.028811 2660 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1-12-0: the server could not find the requested resource E0504 
12:35:30.189172 2660 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1-11-0: the server could not find the requested resource E0504 12:35:30.190077 2660 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1-10-0: the server could not find the requested resource E0504 12:35:30.190273 2660 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1: the server could not find the requested resource error: the server doesn't have a resource type "perconaservermongodbrestores" error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' error: the server doesn't have a resource type "perconaservermongodbs" clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted
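The teardown errors just above come from a best-effort CRD cleanup: for every operator CRD, any leftover custom resources first get their finalizers cleared so deletion cannot hang, and then the CRD itself is removed; the "doesn't have a resource type" and NotFound messages are expected once a CRD is already gone. A simplified sketch of that cleanup idea (not the exact helper used by the suite):

  # Clear finalizers on leftover custom resources, then drop the CRDs.
  # Errors are tolerated because the resource types may already be absent.
  for crd in perconaservermongodbbackups.psmdb.percona.com \
             perconaservermongodbrestores.psmdb.percona.com \
             perconaservermongodbs.psmdb.percona.com; do
      kubectl get "$crd" --all-namespaces --no-headers \
          -o custom-columns=NS:.metadata.namespace,NAME:.metadata.name 2>/dev/null \
      | while read -r ns name; do
          kubectl patch "$crd" "$name" -n "$ns" --type=merge \
              -p '{"metadata":{"finalizers":[]}}' || true
      done
      kubectl delete crd "$crd" --ignore-not-found
  done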
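Further up, the long "waiting for cluster readyness" run that follows the restore is a plain polling loop over the custom resource status, passing through error and initializing states until the operator reports ready. A condensed sketch of what wait_cluster_consistency appears to do in that block, assuming the cluster name and retry budget seen in this run:

  # Poll the psmdb resource state until it reports "ready", or give up.
  cluster=some-name
  retry=0
  echo -n 'waiting for cluster readyness'
  until [[ "$(kubectl get psmdb "$cluster" -o jsonpath='{.status.state}')" == "ready" ]]; do
      retry=$((retry + 1))
      if [ "$retry" -ge 32 ]; then
          echo " cluster never reached ready state" >&2
          exit 1
      fi
      echo -n .
      sleep 10
  done
  echo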
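The "check for passwords leak" step is a simple scan: credentials are read from the users secret, decoded from base64, and both the plaintext and the encoded form are searched for in every saved container log. A rough sketch of that idea; the secret name below is a placeholder and the log directory is the temp dir used by this run:

  # Decode credentials from the users secret and make sure neither the
  # plaintext nor the base64 form shows up in the collected pod logs.
  logs_dir=/tmp/tmp.1h4gczWLFO
  secret=some-users           # placeholder: substitute the cluster's users secret
  for b64 in $(kubectl get secret "$secret" -o jsonpath='{.data.*}'); do
      pass=$(echo "$b64" | base64 -d)
      if grep -l -e "$pass" -e "$b64" "$logs_dir"/logs_output-*.txt; then
          echo "password leak detected" >&2
          exit 1
      fi
  done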