++ echo 'Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1569/e2e-tests/logs/demand-backup-sharded.log' Log: /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1569/e2e-tests/logs/demand-backup-sharded.log ++ '[' -f /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1569/e2e-tests/conf/cloud-secret.yml ']' ++ SKIP_BACKUPS_TO_AWS_GCP_AZURE= ++ oc get projects ++ kubectl get nodes ++ grep '^minikube' E0610 14:00:47.181202 5691 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:47.390140 5691 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:47.499017 5691 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:47.604865 5691 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request +++ kubectl version -o json +++ jq -r .serverVersion.gitVersion +++ grep '\-eks\-' WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 ++ '[' ']' ++ EKS=0 +++ kubectl version -o json +++ jq -r .serverVersion.gitVersion +++ grep gke WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 ++ '[' v1.26.15-gke.1390000 ']' ++ GKE=1 +++ kubectl version -o json +++ jq -r '.serverVersion.major + "." + .serverVersion.minor' +++ /usr/bin/sed -r 's/[^0-9.]+//g' WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 ++ KUBE_VERSION=1.26 + set_debug + [[ 0 == 1 ]] + set +o xtrace ----------------------------------------------------------------------------------- get and delete old CRDs and RBAC ----------------------------------------------------------------------------------- E0610 14:00:50.737238 5918 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:50.964434 5918 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:52.903413 6267 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:53.227893 6267 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:53.338224 6267 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:53.444819 6267 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:53.911559 6267 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:54.027212 6267 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:54.142177 6267 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:54.248337 6267 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:54.354483 6267 memcache.go:121] couldn't get resource 
list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request error: the server doesn't have a resource type "perconaservermongodbbackups" + kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' E0610 14:00:55.971877 6564 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:56.152030 6564 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:56.258050 6564 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:56.363682 6564 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:56.681339 6564 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:56.914726 6564 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:57.025479 6564 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:57.131683 6564 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:57.238233 6564 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request error: the server doesn't have a resource type "perconaservermongodbbackups" E0610 14:00:58.454122 6853 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:58.772045 6853 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:58.880473 6853 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:00:58.989740 6853 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:00.366610 7006 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:00.683109 7006 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:00.790266 7006 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:00.898179 7006 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:01.227784 7006 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:01.444846 7006 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:01.554375 7006 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:01.661563 7006 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 
14:01:01.772981 7006 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request error: the server doesn't have a resource type "perconaservermongodbrestores" + kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' E0610 14:01:03.212960 7237 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:03.437007 7237 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:03.544223 7237 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:03.650590 7237 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:03.983137 7237 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:04.189488 7237 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:04.299219 7237 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:04.405836 7237 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:04.512554 7237 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request error: the server doesn't have a resource type "perconaservermongodbrestores" E0610 14:01:06.120313 7587 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:06.436010 7587 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:06.545836 7587 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:06.712388 7587 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:08.375968 7899 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:08.595593 7899 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:08.702077 7899 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:08.808666 7899 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:09.130666 7899 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:09.352727 7899 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:09.462450 7899 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:09.569079 7899 memcache.go:121] couldn't get resource list for 
metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:09.681921 7899 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request error: the server doesn't have a resource type "perconaservermongodbs" + kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}' E0610 14:01:11.147709 8136 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:11.255733 8136 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:11.363342 8136 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:11.468632 8136 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:11.785525 8136 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:12.004291 8136 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:12.117593 8136 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:12.224049 8136 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:12.330218 8136 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request error: the server doesn't have a resource type "perconaservermongodbs" E0610 14:01:13.839189 8433 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:14.048208 8433 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:14.155288 8433 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:14.262125 8433 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:15.820243 8555 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:16.136244 8555 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:18.408731 8807 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:18.718763 8807 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:18.824152 8807 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found E0610 14:01:20.319888 8983 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:20.543691 8983 memcache.go:121] couldn't get resource list for 
metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:20.652255 8983 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found E0610 14:01:26.118014 9609 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:26.429118 9609 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:26.543601 9609 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request Error from server (NotFound): customresourcedefinitions.apiextensions.k8s.io "null" not found ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- E0610 14:01:36.285781 10276 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:36.500192 10276 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:36.689871 10276 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:36.824235 10276 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:38.385138 10330 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:38.493067 10330 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:38.599519 10330 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request E0610 14:01:38.707537 10330 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request error: resource(s) were provided, but no name was specified E0610 14:01:39.950069 10439 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- 
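
The kubectl patch commands traced above implement the standard force-cleanup pattern for a previous run: clear the finalizers on every leftover custom resource so deletion cannot hang on a missing operator, then drop the CRDs themselves. A minimal sketch of that pattern, assuming a hypothetical namespace; the resource names come from the trace, but the loop is a reconstruction rather than the test suite's exact helper:

ns=psmdb-operator  # hypothetical namespace, for illustration only
for kind in perconaservermongodbbackups perconaservermongodbrestores perconaservermongodbs; do
    # Empty the finalizer list first, so the objects can be garbage-collected
    # even though the operator that owned them is gone.
    for name in $(kubectl -n "$ns" get "${kind}.psmdb.percona.com" -o name 2>/dev/null); do
        kubectl -n "$ns" patch "$name" --type=merge -p '{"metadata":{"finalizers":[]}}'
    done
    kubectl delete crd "${kind}.psmdb.percona.com" --ignore-not-found
done
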
----------------------------------------------------------------------------------- cleaned up old namespaces psmdb-operator ----------------------------------------------------------------------------------- error: resource(s) were provided, but no name was specified ----------------------------------------------------------------------------------- create namespace psmdb-operator ----------------------------------------------------------------------------------- namespace/psmdb-operator created Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1569-eb818ce7-2-cluster2" modified. ----------------------------------------------------------------------------------- start PSMDB operator ----------------------------------------------------------------------------------- customresourcedefinition.apiextensions.k8s.io/perconaservermongodbbackups.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbrestores.psmdb.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaservermongodbs.psmdb.percona.com serverside-applied clusterrole.rbac.authorization.k8s.io/percona-server-mongodb-operator created serviceaccount/percona-server-mongodb-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-server-mongodb-operator created deployment.apps/percona-server-mongodb-operator created waiting for pod/percona-server-mongodb-operator-8d99b8f59-jmdfw to be ready..OK ----------------------------------------------------------------------------------- destroy chaos-mesh ----------------------------------------------------------------------------------- error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified error: resource(s) were provided, but no name was specified ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- cleaned up old namespaces demand-backup-sharded-2830 ----------------------------------------------------------------------------------- error: resource(s) were provided, but no name was specified ----------------------------------------------------------------------------------- create namespace demand-backup-sharded-2830 ----------------------------------------------------------------------------------- namespace/demand-backup-sharded-2830 created Context "gke_cloud-dev-112233_us-central1-a_jen-psmdb-1569-eb818ce7-2-cluster2" modified. ----------------------------------------------------------------------------------- install Minio ----------------------------------------------------------------------------------- Error: uninstall: Release not loaded: minio-service: release: not found Error: no repositories configured "minio" has been added to your repositories NAME: minio-service LAST DEPLOYED: Mon Jun 10 14:02:33 2024 NAMESPACE: demand-backup-sharded-2830 STATUS: deployed REVISION: 1 TEST SUITE: None NOTES: MinIO can be accessed via port 9000 on the following DNS name from within your cluster: minio-service.demand-backup-sharded-2830.svc.cluster.local To access MinIO from localhost, run the below commands: 1. 
export POD_NAME=$(kubectl get pods --namespace demand-backup-sharded-2830 -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") 2. kubectl port-forward $POD_NAME 9000 --namespace demand-backup-sharded-2830 Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace demand-backup-sharded-2830 minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace demand-backup-sharded-2830 minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 3. mc ls minio-service-local waiting for pod/minio-service-57dd49b-5gbgp to be ready.OK service/minio-service created make_bucket: operator-testing pod "aws-cli" deleted If you don't see a command prompt, try pressing enter. warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: error attaching to container: container is in CONTAINER_EXITED state ----------------------------------------------------------------------------------- create first PSMDB cluster ----------------------------------------------------------------------------------- secret/some-users created deployment.apps/psmdb-client created ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created runtimeclass.node.k8s.io/container-rc created perconaservermongodb.psmdb.percona.com/some-name created ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- waiting for pod/some-name-rs0-0 to be ready................OK waiting for pod/some-name-rs0-1 to be ready........OK waiting for pod/some-name-rs0-2 to be ready.......OK Waiting for cluster readyness.............................................. waiting for pod/some-name-cfg-0 to be ready.OK waiting for pod/some-name-cfg-1 to be ready.OK waiting for pod/some-name-cfg-2 to be ready.OK waiting for pod/some-name-mongos-0 to be ready.OK waiting for pod/some-name-mongos-1 to be ready.OK waiting for pod/some-name-mongos-2 to be ready.OK Waiting for cluster readyness ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- secret/some-name-mongos created waiting for pod/some-name-mongos-0 to be ready.OK waiting for pod/some-name-mongos-1 to be ready.OK waiting for pod/some-name-mongos-2 to be ready.OK Waiting for cluster readyness................................................................................................ 
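
The "install Minio" step above reduces to four moves: add the chart repository, install the release, wait for the pod, and create the target bucket from a one-shot aws-cli pod (which is why the log shows "make_bucket: operator-testing" followed by a harmless attach warning for the already-exited container). A sketch with assumed credentials and chart values; only the namespace, release name, pod label, and bucket name are taken from the log:

helm repo add minio https://charts.min.io/
helm install minio-service minio/minio \
    --namespace demand-backup-sharded-2830 \
    --set mode=standalone,rootUser=some-access-key,rootPassword=some-secret-key
kubectl -n demand-backup-sharded-2830 wait --for=condition=Ready pod \
    -l release=minio-service --timeout=300s
# One-shot client pod; the amazon/aws-cli image's entrypoint is the aws CLI,
# so everything after -- becomes aws arguments.
kubectl -n demand-backup-sharded-2830 run aws-cli --rm -i --restart=Never \
    --image=amazon/aws-cli \
    --env=AWS_ACCESS_KEY_ID=some-access-key \
    --env=AWS_SECRET_ACCESS_KEY=some-secret-key \
    -- --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing
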
----------------------------------------------------------------------------------- write data, read from all ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("da66c3d3-395e-49d2-ad3f-a708b2cdfcba") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match Successfully added user: { "user" : "myApp", "roles" : [ { "db" : "myApp", "role" : "readWrite" }, { "db" : "myApp1", "role" : "readWrite" }, { "db" : "myApp2", "role" : "readWrite" } ] } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("067409e0-e193-48cc-8c25-6759fee7767e") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1718028691, 11), "signature" : { "hash" : BinData(0,"88l3DBvshOWu6JBYLBuhCfw1SwE="), "keyId" : NumberLong("7378875332037705742") } }, "operationTime" : Timestamp(1718028691, 5) } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("d429454a-21b0-4079-8458-c47cf2895eba") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1718028694, 13), "signature" : { "hash" : BinData(0,"yb/9tuqJvOsp9wS67mAW7Jjql30="), "keyId" : NumberLong("7378875332037705742") } }, "operationTime" : Timestamp(1718028694, 8) } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("565c1e4c-c8e8-4c65-b99d-db281934b9d5") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match { "ok" : 1, "$clusterTime" : { "clusterTime" : Timestamp(1718028697, 11), "signature" : { "hash" : BinData(0,"I81jJreKXJzNmCqDTaLUuhrIybo="), "keyId" : NumberLong("7378875332037705742") } }, "operationTime" : Timestamp(1718028697, 6) } bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("fb25e1db-d122-4be5-b437-f7871b02ae38") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("dddb3e20-02b9-4f73-afd4-b16687b41a6c") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version 
v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("9f35b21e-7832-48ae-9b37-e94c94f78378") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye some-name-rs0-0 some-name-rs0-1 some-name-rs0-2 some-name-rs1-0 some-name-rs1-1 some-name-rs1-2 some-name-rs2-0 some-name-rs2-1 some-name-rs2-2 ----------------------------------------------------------------------------------- run backups ----------------------------------------------------------------------------------- ----------------------------------------------------------------------------------- run backup backup-minio ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-minio created ----------------------------------------------------------------------------------- run backup backup-aws-s3 ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-aws-s3 created ----------------------------------------------------------------------------------- run backup backup-gcp-cs ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-gcp-cs created ----------------------------------------------------------------------------------- run backup backup-azure-blob ----------------------------------------------------------------------------------- perconaservermongodbbackup.psmdb.percona.com/backup-azure-blob created backup-aws-s3................................................. backup-gcp-cs.................. backup-azure-blob................... backup-minio. 
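
Each "run backup <name>" step above applies one PerconaServerMongoDBBackup resource per storage, and the trailing dots after the resource names are a poll of its status. Roughly, it looks like the sketch below; the resource kind, cluster name, and storage name come from the log, while the spec field names (clusterName, storageName) are an assumption and vary between operator versions:

# Create the backup request for the minio storage.
cat <<EOF | kubectl apply -f -
apiVersion: psmdb.percona.com/v1
kind: PerconaServerMongoDBBackup
metadata:
  name: backup-minio
spec:
  clusterName: some-name   # assumed field name
  storageName: minio       # assumed field name
EOF
# Print one dot per attempt until the backup reports a ready state.
until [[ "$(kubectl get perconaservermongodbbackup.psmdb.percona.com backup-minio -o jsonpath='{.status.state}')" == "ready" ]]; do
    echo -n .
    sleep 5
done
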
----------------------------------------------------------------------------------- check backup and restore -- aws-s3 ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("7ba33950-1a04-47b5-8a64-7c7a9eacd465") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("cd0cc0cc-afe2-416e-8a2a-c86be9e79d53") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("10130b3d-d183-4278-b999-5e4981cd281b") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-aws-s3 created waiting psmdb-restore/backup-aws-s3 to reach ready state................ + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NZH8WWrq5d +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ihr0Ifpf85 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.NZH8WWrq5d ++ cat /tmp/tmp.Ihr0Ifpf85 ++ rm /tmp/tmp.NZH8WWrq5d /tmp/tmp.Ihr0Ifpf85 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.U4vXNezm5I +++ mktemp ++ local LAST_ERR=/tmp/tmp.aumhNIQvLp ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.U4vXNezm5I ++ cat /tmp/tmp.aumhNIQvLp ++ rm /tmp/tmp.U4vXNezm5I /tmp/tmp.aumhNIQvLp ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vMeO2Zo6UY +++ mktemp ++ local LAST_ERR=/tmp/tmp.K3jUQXWXmr ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.vMeO2Zo6UY ++ cat /tmp/tmp.K3jUQXWXmr ++ rm /tmp/tmp.vMeO2Zo6UY /tmp/tmp.K3jUQXWXmr ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vYbQfOQrJo +++ mktemp ++ local LAST_ERR=/tmp/tmp.Bgg1qESRFH ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.vYbQfOQrJo ++ cat /tmp/tmp.Bgg1qESRFH ++ rm /tmp/tmp.vYbQfOQrJo /tmp/tmp.Bgg1qESRFH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FTgPcjuEIY +++ mktemp ++ local LAST_ERR=/tmp/tmp.qZqxlfogYE ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.FTgPcjuEIY ++ cat /tmp/tmp.qZqxlfogYE ++ rm /tmp/tmp.FTgPcjuEIY /tmp/tmp.qZqxlfogYE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FezfjAqfxo +++ mktemp ++ local LAST_ERR=/tmp/tmp.HuGcdCAdHf ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.FezfjAqfxo ++ cat /tmp/tmp.HuGcdCAdHf ++ rm /tmp/tmp.FezfjAqfxo /tmp/tmp.HuGcdCAdHf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TEgF9L0U5z +++ mktemp ++ local LAST_ERR=/tmp/tmp.GUwUxagA1t ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.TEgF9L0U5z ++ cat /tmp/tmp.GUwUxagA1t ++ rm /tmp/tmp.TEgF9L0U5z /tmp/tmp.GUwUxagA1t ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iVKSTAc1gA +++ mktemp ++ local LAST_ERR=/tmp/tmp.6Ozc4JMobs ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.iVKSTAc1gA ++ cat /tmp/tmp.6Ozc4JMobs ++ rm /tmp/tmp.iVKSTAc1gA /tmp/tmp.6Ozc4JMobs ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-2830 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-2830 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830 + local driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2EUWUiw1zv +++ mktemp ++ local LAST_ERR=/tmp/tmp.NPlvaPTkpB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.2EUWUiw1zv ++ cat /tmp/tmp.NPlvaPTkpB ++ rm /tmp/tmp.2EUWUiw1zv /tmp/tmp.NPlvaPTkpB ++ return 0 + local client_container=psmdb-client-7469665986-tw5t7 + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.Kj9cgS29nm ++ mktemp + local LAST_ERR=/tmp/tmp.nexnBk0w85 + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.Kj9cgS29nm + cat /tmp/tmp.nexnBk0w85 + rm /tmp/tmp.Kj9cgS29nm /tmp/tmp.nexnBk0w85 + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1569/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.9ebN8bx5Bl/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-2830 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-2830 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830 + local 
driver=mongodb + local suffix=.svc.cluster.local + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.llIL6QlY8Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.G4rBeM5SZg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.llIL6QlY8Y ++ cat /tmp/tmp.G4rBeM5SZg ++ rm /tmp/tmp.llIL6QlY8Y /tmp/tmp.G4rBeM5SZg ++ return 0 + local client_container=psmdb-client-7469665986-tw5t7 + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.I8nRK2vl7o ++ mktemp + local LAST_ERR=/tmp/tmp.PRJcC8R5an + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.I8nRK2vl7o + cat /tmp/tmp.PRJcC8R5an + rm /tmp/tmp.I8nRK2vl7o /tmp/tmp.PRJcC8R5an + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1569/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.9ebN8bx5Bl/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-2830 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-2830 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6tcbqYY9Xv +++ mktemp ++ local LAST_ERR=/tmp/tmp.REBbPgGNTy ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.6tcbqYY9Xv ++ cat /tmp/tmp.REBbPgGNTy ++ rm /tmp/tmp.6tcbqYY9Xv /tmp/tmp.REBbPgGNTy ++ return 0 + local client_container=psmdb-client-7469665986-tw5t7 + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo 
mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.WJgzCMOHOB ++ mktemp + local LAST_ERR=/tmp/tmp.8cIDys8Arl + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.WJgzCMOHOB + cat /tmp/tmp.8cIDys8Arl + rm /tmp/tmp.WJgzCMOHOB /tmp/tmp.8cIDys8Arl + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1569/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.9ebN8bx5Bl/find2 + desc 'check backup and restore -- gcp-cs' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- gcp-cs ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("26944142-c1bd-4ec6-b964-b0cba9f41529") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("dd4bf670-074d-47b5-99bd-f4d1b066a39a") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("a4992295-c9b9-4652-98e8-a20fd7a2a365") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-gcp-cs created waiting psmdb-restore/backup-gcp-cs to reach ready state............. + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ULS7tOXYmz +++ mktemp ++ local LAST_ERR=/tmp/tmp.LRECsSYi4h ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.ULS7tOXYmz ++ cat /tmp/tmp.LRECsSYi4h ++ rm /tmp/tmp.ULS7tOXYmz /tmp/tmp.LRECsSYi4h ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zQQcHV6NAS +++ mktemp ++ local LAST_ERR=/tmp/tmp.OhGEzBL7eq ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.zQQcHV6NAS ++ cat /tmp/tmp.OhGEzBL7eq ++ rm /tmp/tmp.zQQcHV6NAS /tmp/tmp.OhGEzBL7eq ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H1v7OqsOQF +++ mktemp ++ local LAST_ERR=/tmp/tmp.1Fw2JuqqeA ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.H1v7OqsOQF ++ cat /tmp/tmp.1Fw2JuqqeA ++ rm /tmp/tmp.H1v7OqsOQF /tmp/tmp.1Fw2JuqqeA ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w04jXc7Ajv +++ mktemp ++ local LAST_ERR=/tmp/tmp.kJgmbVo1fg ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.w04jXc7Ajv ++ cat /tmp/tmp.kJgmbVo1fg ++ rm /tmp/tmp.w04jXc7Ajv /tmp/tmp.kJgmbVo1fg ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yaikO9LEst +++ mktemp ++ local LAST_ERR=/tmp/tmp.6WB3Pt8LMc ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.yaikO9LEst ++ cat /tmp/tmp.6WB3Pt8LMc ++ rm /tmp/tmp.yaikO9LEst /tmp/tmp.6WB3Pt8LMc ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 5 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PITpI14HDz +++ mktemp ++ local LAST_ERR=/tmp/tmp.OKXXUJr5dk ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.PITpI14HDz ++ cat /tmp/tmp.OKXXUJr5dk ++ rm /tmp/tmp.PITpI14HDz /tmp/tmp.OKXXUJr5dk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 6 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iBnVfKoETi +++ mktemp ++ local LAST_ERR=/tmp/tmp.L5j77U5clz ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.iBnVfKoETi ++ cat /tmp/tmp.L5j77U5clz ++ rm /tmp/tmp.iBnVfKoETi /tmp/tmp.L5j77U5clz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 7 -ge 32 ']' + echo -n . 
.+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NShwBbFK0l +++ mktemp ++ local LAST_ERR=/tmp/tmp.WLWmqb5FxI ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.NShwBbFK0l ++ cat /tmp/tmp.WLWmqb5FxI ++ rm /tmp/tmp.NShwBbFK0l /tmp/tmp.WLWmqb5FxI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 8 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.596A9YGIIp +++ mktemp ++ local LAST_ERR=/tmp/tmp.uOKKwFkq4f ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.596A9YGIIp ++ cat /tmp/tmp.uOKKwFkq4f ++ rm /tmp/tmp.596A9YGIIp /tmp/tmp.uOKKwFkq4f ++ return 0 + [[ ready == \r\e\a\d\y ]] + check_data + local postfix= ++ seq 0 2 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-2830 '' .svc.cluster.local myApp test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830 + local postfix= + local suffix=.svc.cluster.local + local database=myApp + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-2830 mongodb .svc.cluster.local + local 'command=use myApp\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fkrh9alFic +++ mktemp ++ local LAST_ERR=/tmp/tmp.XUQjd9aru4 ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.fkrh9alFic ++ cat /tmp/tmp.XUQjd9aru4 ++ rm /tmp/tmp.fkrh9alFic /tmp/tmp.XUQjd9aru4 ++ return 0 + local client_container=psmdb-client-7469665986-tw5t7 + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.cSAj8qJxiQ ++ mktemp + local LAST_ERR=/tmp/tmp.RC2PvMtQof + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.cSAj8qJxiQ + cat /tmp/tmp.RC2PvMtQof + rm /tmp/tmp.cSAj8qJxiQ /tmp/tmp.RC2PvMtQof + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1569/e2e-tests/demand-backup-sharded/compare/find.json 
/tmp/tmp.9ebN8bx5Bl/find + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-2830 1 .svc.cluster.local myApp1 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830 + local postfix=1 + local suffix=.svc.cluster.local + local database=myApp1 + local collection=test + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-2830 mongodb .svc.cluster.local + local 'command=use myApp1\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830 + local driver=mongodb + local suffix=.svc.cluster.local ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kGgDqsdAOJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.s72aWjDoeT ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.kGgDqsdAOJ ++ cat /tmp/tmp.s72aWjDoeT ++ rm /tmp/tmp.kGgDqsdAOJ /tmp/tmp.s72aWjDoeT ++ return 0 + local client_container=psmdb-client-7469665986-tw5t7 + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.G5mMK0s8YO ++ mktemp + local LAST_ERR=/tmp/tmp.cEJTzz5AiB + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.G5mMK0s8YO + cat /tmp/tmp.cEJTzz5AiB + rm /tmp/tmp.G5mMK0s8YO /tmp/tmp.cEJTzz5AiB + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1569/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.9ebN8bx5Bl/find1 + for i in '$(seq 0 2)' + compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-2830 2 .svc.cluster.local myApp2 test + local command=find + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830 + local postfix=2 + local suffix=.svc.cluster.local + local database=myApp2 + local collection=test + run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-2830 mongodb .svc.cluster.local + local 'command=use myApp2\n db.test.find()' + local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830 + local driver=mongodb + local suffix=.svc.cluster.local + /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/' + egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:' ++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tOlmDJJT4Q +++ mktemp ++ local LAST_ERR=/tmp/tmp.3BYzc7y8Nz ++ local exit_status=0 
++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.tOlmDJJT4Q ++ cat /tmp/tmp.3BYzc7y8Nz ++ rm /tmp/tmp.tOlmDJJT4Q /tmp/tmp.3BYzc7y8Nz ++ return 0 + local client_container=psmdb-client-7469665986-tw5t7 + local mongo_flag= + kubectl_bin exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin ' ++ mktemp + local LAST_OUT=/tmp/tmp.8boScXqzVI ++ mktemp + local LAST_ERR=/tmp/tmp.GUX2W3KFRJ + local exit_status=0 + local timeout=4 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin ' + exit_status=0 + set -e + '[' 0 '!=' 0 -a -n 0 ']' + break + cat /tmp/tmp.8boScXqzVI + cat /tmp/tmp.GUX2W3KFRJ + rm /tmp/tmp.8boScXqzVI /tmp/tmp.GUX2W3KFRJ + return 0 + diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1569/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.9ebN8bx5Bl/find2 + desc 'check backup and restore -- azure-blob' + set +o xtrace ----------------------------------------------------------------------------------- check backup and restore -- azure-blob ----------------------------------------------------------------------------------- Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("2f9a4804-7168-4551-aeb9-ed8e6181a9e8") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("659fc722-b5b5-49f2-9194-b0655b022a53") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("fce26ca2-6ffb-4aee-9a70-e4fd0365d21c") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-azure-blob created waiting psmdb-restore/backup-azure-blob to reach ready state.............. 
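
The long trace that follows (like the similar ones after the aws-s3 and gcp-cs restores above) is the expansion of wait_cluster_consistency: poll the psmdb resource's .status.state, which cycles through initializing and error while the restored shards re-elect primaries, until it reads ready or the retry budget runs out. Condensed from the trace itself, the helper is essentially the following; this is a reconstruction, not the suite's verbatim function:

wait_cluster_consistency() {
    local cluster_name=$1
    local wait_time=32
    retry=0
    sleep 7
    echo -n 'waiting for cluster readyness'
    until [[ "$(kubectl get psmdb "$cluster_name" -o jsonpath='{.status.state}')" == "ready" ]]; do
        let retry+=1
        # Transient "error" states are expected right after a restore; only
        # give up once the retry budget is exhausted.
        [ "$retry" -ge "$wait_time" ] && return 1
        echo -n .
        sleep 10
    done
}
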
+ '[' 1 -eq 1 ']'
+ wait_cluster_consistency some-name
+ local cluster_name=some-name
+ local wait_time=32
+ retry=0
+ sleep 7
+ echo -n 'waiting for cluster readiness'
waiting for cluster readiness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.fh1MJWVQXL
+++ mktemp
++ local LAST_ERR=/tmp/tmp.WEPmdExfn7
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.fh1MJWVQXL
++ cat /tmp/tmp.WEPmdExfn7
++ rm /tmp/tmp.fh1MJWVQXL /tmp/tmp.WEPmdExfn7
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 1 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.sprPR1Wbdu
+++ mktemp
++ local LAST_ERR=/tmp/tmp.ZpspLeKFbD
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.sprPR1Wbdu
++ cat /tmp/tmp.ZpspLeKFbD
++ rm /tmp/tmp.sprPR1Wbdu /tmp/tmp.ZpspLeKFbD
++ return 0
+ [[ error == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 2 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.ktwMWLQ4E5
+++ mktemp
++ local LAST_ERR=/tmp/tmp.ApJPgdZycB
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.ktwMWLQ4E5
++ cat /tmp/tmp.ApJPgdZycB
++ rm /tmp/tmp.ktwMWLQ4E5 /tmp/tmp.ApJPgdZycB
++ return 0
+ [[ error == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 3 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.dsuzjzX947
+++ mktemp
++ local LAST_ERR=/tmp/tmp.KFKvJ4RFGR
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.dsuzjzX947
++ cat /tmp/tmp.KFKvJ4RFGR
++ rm /tmp/tmp.dsuzjzX947 /tmp/tmp.KFKvJ4RFGR
++ return 0
+ [[ error == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 4 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.5WR3lg6F97
+++ mktemp
++ local LAST_ERR=/tmp/tmp.AczPf36cZV
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.5WR3lg6F97
++ cat /tmp/tmp.AczPf36cZV
++ rm /tmp/tmp.5WR3lg6F97 /tmp/tmp.AczPf36cZV
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 5 -ge 32 ']'
+ echo -n .
.+ sleep 10
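Note that the cluster passes through error states right after the restore before settling back to initializing and finally ready; the loop tolerates that. A minimal sketch of wait_cluster_consistency as reconstructed from this trace (the failure message on timeout is an assumption):

    wait_cluster_consistency() {
        local cluster_name="$1"
        local wait_time=32
        retry=0
        sleep 7    # give the operator a moment to publish a fresh status
        echo -n 'waiting for cluster readiness'
        until [[ $(kubectl_bin get psmdb "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]]; do
            let retry+=1
            if [ "$retry" -ge "$wait_time" ]; then
                echo "cluster ${cluster_name} did not reach ready state in time"
                exit 1
            fi
            echo -n .
            sleep 10
        done
    }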
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.4QRhCBVqMv
+++ mktemp
++ local LAST_ERR=/tmp/tmp.xYz1yHm1dy
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.4QRhCBVqMv
++ cat /tmp/tmp.xYz1yHm1dy
++ rm /tmp/tmp.4QRhCBVqMv /tmp/tmp.xYz1yHm1dy
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 6 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.jRkXjwU8s8
+++ mktemp
++ local LAST_ERR=/tmp/tmp.nwFkr1Jgfa
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.jRkXjwU8s8
++ cat /tmp/tmp.nwFkr1Jgfa
++ rm /tmp/tmp.jRkXjwU8s8 /tmp/tmp.nwFkr1Jgfa
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 7 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.ptfILRkQj3
+++ mktemp
++ local LAST_ERR=/tmp/tmp.RifWXLheZr
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.ptfILRkQj3
++ cat /tmp/tmp.RifWXLheZr
++ rm /tmp/tmp.ptfILRkQj3 /tmp/tmp.RifWXLheZr
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 8 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.PVWon1o1wL
+++ mktemp
++ local LAST_ERR=/tmp/tmp.uZHsznSZtI
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.PVWon1o1wL
++ cat /tmp/tmp.uZHsznSZtI
++ rm /tmp/tmp.PVWon1o1wL /tmp/tmp.uZHsznSZtI
++ return 0
+ [[ ready == \r\e\a\d\y ]]
+ check_data
+ local postfix=
++ seq 0 2
+ for i in '$(seq 0 2)'
+ compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-2830 '' .svc.cluster.local myApp test
+ local command=find
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830
+ local postfix=
+ local suffix=.svc.cluster.local
+ local database=myApp
+ local collection=test
+ /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
+ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:'
+ run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-2830 mongodb .svc.cluster.local
+ local 'command=use myApp\n db.test.find()'
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830
+ local driver=mongodb
+ local suffix=.svc.cluster.local
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.8QDh6xmgq9
+++ mktemp
++ local LAST_ERR=/tmp/tmp.eTU9tT0SUm
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.8QDh6xmgq9
++ cat /tmp/tmp.eTU9tT0SUm
++ rm /tmp/tmp.8QDh6xmgq9 /tmp/tmp.eTU9tT0SUm
++ return 0
+ local client_container=psmdb-client-7469665986-tw5t7
+ local mongo_flag=
+ kubectl_bin exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin '
++ mktemp
+ local LAST_OUT=/tmp/tmp.FS05vm0FJM
++ mktemp
+ local LAST_ERR=/tmp/tmp.AijcQoBuFb
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 0 ']'
+ break
+ cat /tmp/tmp.FS05vm0FJM
+ cat /tmp/tmp.AijcQoBuFb
+ rm /tmp/tmp.FS05vm0FJM /tmp/tmp.AijcQoBuFb
+ return 0
+ diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1569/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.9ebN8bx5Bl/find
+ for i in '$(seq 0 2)'
+ compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-2830 1 .svc.cluster.local myApp1 test
+ local command=find
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830
+ local postfix=1
+ local suffix=.svc.cluster.local
+ local database=myApp1
+ local collection=test
+ run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-2830 mongodb .svc.cluster.local
+ local 'command=use myApp1\n db.test.find()'
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.hWJWqNvwNH
+++ mktemp
++ local LAST_ERR=/tmp/tmp.rC7v2LBvvI
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.hWJWqNvwNH
++ cat /tmp/tmp.rC7v2LBvvI
++ rm /tmp/tmp.hWJWqNvwNH /tmp/tmp.rC7v2LBvvI
++ return 0
+ local client_container=psmdb-client-7469665986-tw5t7
+ local mongo_flag=
+ kubectl_bin exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin '
++ mktemp
+ local LAST_OUT=/tmp/tmp.e9YfDzZHq4
++ mktemp
+ local LAST_ERR=/tmp/tmp.Nad765a7JD
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 0 ']'
+ break
+ cat /tmp/tmp.e9YfDzZHq4
+ cat /tmp/tmp.Nad765a7JD
+ rm /tmp/tmp.e9YfDzZHq4 /tmp/tmp.Nad765a7JD
+ return 0
+ diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1569/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.9ebN8bx5Bl/find1
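Every kubectl invocation in this trace goes through the kubectl_bin wrapper, which is what produces the recurring mktemp/LAST_OUT/LAST_ERR/seq 0 2 pattern. A minimal sketch reconstructed from the trace (the retry condition and the routing of LAST_ERR to stderr are assumptions):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 timeout=4
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e                       # a failed attempt must not abort the test
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 -a -n "$i" ]; then
                sleep "$timeout"         # transient API failure: back off and retry
            else
                break
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

Capturing stdout/stderr in temp files and replaying them after the retry loop keeps command substitutions like $(kubectl_bin get pods ...) clean even when the first attempt fails.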
+ for i in '$(seq 0 2)'
+ compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-2830 2 .svc.cluster.local myApp2 test
+ local command=find
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830
+ local postfix=2
+ local suffix=.svc.cluster.local
+ local database=myApp2
+ local collection=test
+ run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-2830 mongodb .svc.cluster.local
+ local 'command=use myApp2\n db.test.find()'
+ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:'
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.9262M2s7Lt
+++ mktemp
++ local LAST_ERR=/tmp/tmp.BTsaPHCj2e
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.9262M2s7Lt
++ cat /tmp/tmp.BTsaPHCj2e
++ rm /tmp/tmp.9262M2s7Lt /tmp/tmp.BTsaPHCj2e
++ return 0
+ local client_container=psmdb-client-7469665986-tw5t7
+ local mongo_flag=
+ kubectl_bin exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin '
++ mktemp
+ local LAST_OUT=/tmp/tmp.Z4NSCBrFxb
++ mktemp
+ local LAST_ERR=/tmp/tmp.zCdvcBFhj6
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 0 ']'
+ break
+ cat /tmp/tmp.Z4NSCBrFxb
+ cat /tmp/tmp.zCdvcBFhj6
+ rm /tmp/tmp.Z4NSCBrFxb /tmp/tmp.zCdvcBFhj6
+ return 0
+ diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1569/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.9ebN8bx5Bl/find2
+ desc 'check backup and restore -- minio'
+ set +o xtrace
-----------------------------------------------------------------------------------
check backup and restore -- minio
-----------------------------------------------------------------------------------
2024-06-10 14:13:00 55 myApp.test.gz
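The `2024-06-10 14:13:00 55 myApp.test.gz` line is an S3-style object listing from the in-cluster minio bucket, confirming the backup artifact actually landed there before the restore is attempted. One plausible way to produce such a listing from inside the cluster (the image, bucket name, credentials, and endpoint below are all assumptions for illustration):

    kubectl run -i --rm aws-cli \
        --image=perconalab/awscli \
        --restart=Never \
        --env=AWS_ACCESS_KEY_ID=some-access-key \
        --env=AWS_SECRET_ACCESS_KEY=some-secret-key -- \
        aws --endpoint-url http://minio-service:9000 \
            s3 ls s3://operator-testing/ --recursive

The --endpoint-url flag is what points the stock AWS CLI at minio instead of real S3.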
UUID("f202fa4e-8ee3-4d86-8979-ccad0f66b517") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp1 WriteResult({ "nInserted" : 1 }) bye Percona Server for MongoDB shell version v4.4.29-28 connecting to: mongodb://some-name-mongos.demand-backup-sharded-2830.svc.cluster.local:27017/admin?compressors=disabled&gssapiServiceName=mongodb Implicit session: session { "id" : UUID("5ce8b1b8-ad2e-468c-aba5-b59c940bddc0") } Percona Server for MongoDB server version: v7.0.11-6 WARNING: shell and server versions do not match switched to db myApp2 WriteResult({ "nInserted" : 1 }) bye perconaservermongodbrestore.psmdb.percona.com/restore-backup-minio created waiting psmdb-restore/backup-minio to reach ready state............ + '[' 1 -eq 1 ']' + wait_cluster_consistency some-name + local cluster_name=some-name + local wait_time=32 + retry=0 + sleep 7 + echo -n 'waiting for cluster readyness' waiting for cluster readyness++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nGEoNh60VE +++ mktemp ++ local LAST_ERR=/tmp/tmp.hTqjFYq5TW ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.nGEoNh60VE ++ cat /tmp/tmp.hTqjFYq5TW ++ rm /tmp/tmp.nGEoNh60VE /tmp/tmp.hTqjFYq5TW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + let retry+=1 + '[' 1 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X2s478vQTf +++ mktemp ++ local LAST_ERR=/tmp/tmp.m3vl1nP4Aw ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.X2s478vQTf ++ cat /tmp/tmp.m3vl1nP4Aw ++ rm /tmp/tmp.X2s478vQTf /tmp/tmp.m3vl1nP4Aw ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 2 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zgFFA0sSKd +++ mktemp ++ local LAST_ERR=/tmp/tmp.gHUwULB5hS ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.zgFFA0sSKd ++ cat /tmp/tmp.gHUwULB5hS ++ rm /tmp/tmp.zgFFA0sSKd /tmp/tmp.gHUwULB5hS ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 3 -ge 32 ']' + echo -n . .+ sleep 10 ++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.A7hHdupDj8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gIMdMw4dTB ++ local exit_status=0 ++ local timeout=4 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get psmdb some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 -a -n 0 ']' ++ break ++ cat /tmp/tmp.A7hHdupDj8 ++ cat /tmp/tmp.gIMdMw4dTB ++ rm /tmp/tmp.A7hHdupDj8 /tmp/tmp.gIMdMw4dTB ++ return 0 + [[ error == \r\e\a\d\y ]] + let retry+=1 + '[' 4 -ge 32 ']' + echo -n . 
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.5WslHSnF25
+++ mktemp
++ local LAST_ERR=/tmp/tmp.wlgZbAnkPH
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.5WslHSnF25
++ cat /tmp/tmp.wlgZbAnkPH
++ rm /tmp/tmp.5WslHSnF25 /tmp/tmp.wlgZbAnkPH
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 5 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.PdoxfxqvRB
+++ mktemp
++ local LAST_ERR=/tmp/tmp.QBUnbNeMax
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.PdoxfxqvRB
++ cat /tmp/tmp.QBUnbNeMax
++ rm /tmp/tmp.PdoxfxqvRB /tmp/tmp.QBUnbNeMax
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 6 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.8jwCNvNU31
+++ mktemp
++ local LAST_ERR=/tmp/tmp.uCAAZiJJvj
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.8jwCNvNU31
++ cat /tmp/tmp.uCAAZiJJvj
++ rm /tmp/tmp.8jwCNvNU31 /tmp/tmp.uCAAZiJJvj
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 7 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.ai4WFRWCVm
+++ mktemp
++ local LAST_ERR=/tmp/tmp.od1iRLiUVC
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.ai4WFRWCVm
++ cat /tmp/tmp.od1iRLiUVC
++ rm /tmp/tmp.ai4WFRWCVm /tmp/tmp.od1iRLiUVC
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 8 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.BxOCRfRBYu
+++ mktemp
++ local LAST_ERR=/tmp/tmp.UrcIcqZ5Xe
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.BxOCRfRBYu
++ cat /tmp/tmp.UrcIcqZ5Xe
++ rm /tmp/tmp.BxOCRfRBYu /tmp/tmp.UrcIcqZ5Xe
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 9 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.5FdEsxJeYe
+++ mktemp
++ local LAST_ERR=/tmp/tmp.QU53vrejYw
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.5FdEsxJeYe
++ cat /tmp/tmp.QU53vrejYw
++ rm /tmp/tmp.5FdEsxJeYe /tmp/tmp.QU53vrejYw
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 10 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.as8PTUcY4S
+++ mktemp
++ local LAST_ERR=/tmp/tmp.t9lYVta8wS
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.as8PTUcY4S
++ cat /tmp/tmp.t9lYVta8wS
++ rm /tmp/tmp.as8PTUcY4S /tmp/tmp.t9lYVta8wS
++ return 0
+ [[ initializing == \r\e\a\d\y ]]
+ let retry+=1
+ '[' 11 -ge 32 ']'
+ echo -n .
.+ sleep 10
++ kubectl_bin get psmdb some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.8GnZcfuruP
+++ mktemp
++ local LAST_ERR=/tmp/tmp.ufMrQbHTJ6
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get psmdb some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.8GnZcfuruP
++ cat /tmp/tmp.ufMrQbHTJ6
++ rm /tmp/tmp.8GnZcfuruP /tmp/tmp.ufMrQbHTJ6
++ return 0
+ [[ ready == \r\e\a\d\y ]]
+ check_data
+ local postfix=
++ seq 0 2
+ for i in '$(seq 0 2)'
+ compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-2830 '' .svc.cluster.local myApp test
+ local command=find
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830
+ local postfix=
+ local suffix=.svc.cluster.local
+ local database=myApp
+ local collection=test
+ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:'
+ /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
+ run_mongos 'use myApp\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-2830 mongodb .svc.cluster.local
+ local 'command=use myApp\n db.test.find()'
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830
+ local driver=mongodb
+ local suffix=.svc.cluster.local
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.2ogXN1LMPw
+++ mktemp
++ local LAST_ERR=/tmp/tmp.zVrVabwei9
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.2ogXN1LMPw
++ cat /tmp/tmp.zVrVabwei9
++ rm /tmp/tmp.2ogXN1LMPw /tmp/tmp.zVrVabwei9
++ return 0
+ local client_container=psmdb-client-7469665986-tw5t7
+ local mongo_flag=
+ kubectl_bin exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin '
++ mktemp
+ local LAST_OUT=/tmp/tmp.xvirI9M53X
++ mktemp
+ local LAST_ERR=/tmp/tmp.lukXeQo1FQ
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 0 ']'
+ break
+ cat /tmp/tmp.xvirI9M53X
+ cat /tmp/tmp.lukXeQo1FQ
+ rm /tmp/tmp.xvirI9M53X /tmp/tmp.lukXeQo1FQ
+ return 0
+ diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1569/e2e-tests/demand-backup-sharded/compare/find.json /tmp/tmp.9ebN8bx5Bl/find
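check_data drives all three of these comparisons: it walks the databases myApp, myApp1, and myApp2 and diffs each against its fixture. A minimal sketch consistent with the trace (the namespace variable and loop structure are assumptions):

    check_data() {
        local postfix="$1"
        local i suffix
        for i in 0 1 2; do
            suffix=""
            if [ "$i" -gt 0 ]; then suffix="$i"; fi
            # databases myApp, myApp1, myApp2 — one per sharded namespace zone
            compare_mongos_cmd find \
                "myApp:myPass@some-name-mongos.${namespace}" \
                "${postfix}${suffix}" .svc.cluster.local "myApp${suffix}" test
        done
    }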
+ for i in '$(seq 0 2)'
+ compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-2830 1 .svc.cluster.local myApp1 test
+ local command=find
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830
+ local postfix=1
+ local suffix=.svc.cluster.local
+ local database=myApp1
+ local collection=test
+ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:'
+ /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
+ run_mongos 'use myApp1\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-2830 mongodb .svc.cluster.local
+ local 'command=use myApp1\n db.test.find()'
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830
+ local driver=mongodb
+ local suffix=.svc.cluster.local
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.2lcbwXDuDd
+++ mktemp
++ local LAST_ERR=/tmp/tmp.crf0Rssagc
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.2lcbwXDuDd
++ cat /tmp/tmp.crf0Rssagc
++ rm /tmp/tmp.2lcbwXDuDd /tmp/tmp.crf0Rssagc
++ return 0
+ local client_container=psmdb-client-7469665986-tw5t7
+ local mongo_flag=
+ kubectl_bin exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin '
++ mktemp
+ local LAST_OUT=/tmp/tmp.y6PbagglTL
++ mktemp
+ local LAST_ERR=/tmp/tmp.4GqYFuqRym
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp1\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 0 ']'
+ break
+ cat /tmp/tmp.y6PbagglTL
+ cat /tmp/tmp.4GqYFuqRym
+ rm /tmp/tmp.y6PbagglTL /tmp/tmp.4GqYFuqRym
+ return 0
+ diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1569/e2e-tests/demand-backup-sharded/compare/find1.json /tmp/tmp.9ebN8bx5Bl/find1
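run_mongos itself is a thin wrapper: it finds the long-lived psmdb-client pod and pipes mongo shell commands into it, which is why every query in this log runs through the same psmdb-client-7469665986-tw5t7 container. A sketch reconstructed from the trace (default-argument handling is an assumption):

    run_mongos() {
        local command="$1" uri="$2" driver="${3:-mongodb}" suffix="${4:-.svc.cluster.local}"
        local mongo_flag="$5"
        local client_container
        client_container=$(kubectl_bin get pods --selector=name=psmdb-client \
            -o 'jsonpath={.items[].metadata.name}')
        # feed the shell commands to mongo inside the client pod
        kubectl_bin exec "$client_container" -- bash -c \
            "printf '${command}\n' | mongo ${driver}://${uri}${suffix}/admin ${mongo_flag}"
    }

Running the shell from a pod inside the cluster means the mongos service DNS name resolves without any port-forwarding.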
+ for i in '$(seq 0 2)'
+ compare_mongos_cmd find myApp:myPass@some-name-mongos.demand-backup-sharded-2830 2 .svc.cluster.local myApp2 test
+ local command=find
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830
+ local postfix=2
+ local suffix=.svc.cluster.local
+ local database=myApp2
+ local collection=test
+ run_mongos 'use myApp2\n db.test.find()' myApp:myPass@some-name-mongos.demand-backup-sharded-2830 mongodb .svc.cluster.local
+ egrep -v 'I NETWORK|W NETWORK|Error saving history file|Percona Server for MongoDB|connecting to:|Unable to reach primary for set|Implicit session:|versions do not match|Error saving history file:'
+ local 'command=use myApp2\n db.test.find()'
+ local uri=myApp:myPass@some-name-mongos.demand-backup-sharded-2830
+ local driver=mongodb
+ local suffix=.svc.cluster.local
+ /usr/bin/sed -re 's/ObjectId\("[0-9a-f]+"\)//; s/-[0-9]+.svc/-xxx.svc/'
++ kubectl_bin get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.mnpBKPYbTX
+++ mktemp
++ local LAST_ERR=/tmp/tmp.xRm1N4lgoL
++ local exit_status=0
++ local timeout=4
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=psmdb-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 -a -n 0 ']'
++ break
++ cat /tmp/tmp.mnpBKPYbTX
++ cat /tmp/tmp.xRm1N4lgoL
++ rm /tmp/tmp.mnpBKPYbTX /tmp/tmp.xRm1N4lgoL
++ return 0
+ local client_container=psmdb-client-7469665986-tw5t7
+ local mongo_flag=
+ kubectl_bin exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin '
++ mktemp
+ local LAST_OUT=/tmp/tmp.vleGCE1mST
++ mktemp
+ local LAST_ERR=/tmp/tmp.74lv23tRkr
+ local exit_status=0
+ local timeout=4
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl exec psmdb-client-7469665986-tw5t7 -- bash -c 'printf '\''use myApp2\n db.test.find()\n'\'' | mongo mongodb://myApp:myPass@some-name-mongos.demand-backup-sharded-2830.svc.cluster.local/admin '
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 -a -n 0 ']'
+ break
+ cat /tmp/tmp.vleGCE1mST
+ cat /tmp/tmp.74lv23tRkr
+ rm /tmp/tmp.vleGCE1mST /tmp/tmp.74lv23tRkr
+ return 0
+ diff /mnt/jenkins/workspace/cloud-psmdb-operator_PR-1569/e2e-tests/demand-backup-sharded/compare/find2.json /tmp/tmp.9ebN8bx5Bl/find2
+ desc 'delete backup and check if it is removed from bucket -- minio'
+ set +o xtrace
-----------------------------------------------------------------------------------
delete backup and check if it is removed from bucket -- minio
-----------------------------------------------------------------------------------
perconaservermongodbbackup.psmdb.percona.com "backup-aws-s3" deleted
perconaservermongodbbackup.psmdb.percona.com "backup-azure-blob" deleted
perconaservermongodbbackup.psmdb.percona.com "backup-gcp-cs" deleted
perconaservermongodbbackup.psmdb.percona.com "backup-minio" deleted
If you don't see a command prompt, try pressing enter.
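The "If you don't see a command prompt" message is kubectl's standard note when attaching to a short-lived pod; here a throwaway aws-cli pod is used to confirm the minio bucket no longer contains the deleted backup's objects. A sketch of that check (image, endpoint, bucket, and object name are assumptions mirroring the earlier listing):

    # after the backup CRs are deleted, the dump should be gone from the bucket
    if kubectl run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- \
        aws --endpoint-url http://minio-service:9000 \
            s3 ls s3://operator-testing/ --recursive \
        | grep myApp.test.gz; then
        echo "backup object is still present in the bucket"
        exit 1
    fi

Deleting a perconaservermongodbbackup CR triggers the operator's delete-backup finalizer, which is what actually removes the objects from storage.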
warning: couldn't attach to pod/aws-cli, falling back to streaming logs: unable to upgrade connection: container aws-cli not found in pod aws-cli_demand-backup-sharded-2830
-----------------------------------------------------------------------------------
check for passwords leak
-----------------------------------------------------------------------------------
secrets=YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2
passwords=backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 backup123456 clusterAdmin123456 clusterMonitor123456 databaseAdmin123456 userAdmin123456 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2 YmFja3VwMTIzNDU2 Y2x1c3RlckFkbWluMTIzNDU2 Y2x1c3Rlck1vbml0b3IxMjM0NTY= ZGF0YWJhc2VBZG1pbjEyMzQ1Ng== dXNlckFkbWluMTIzNDU2
pods=minio-service-57dd49b-5gbgp psmdb-client-7469665986-tw5t7 some-name-cfg-0 some-name-cfg-1 some-name-cfg-2 some-name-mongos-0 some-name-mongos-1 some-name-mongos-2 some-name-rs0-0 some-name-rs0-1 some-name-rs0-2 some-name-rs1-0 some-name-rs1-1 some-name-rs1-2 some-name-rs2-0 some-name-rs2-1 some-name-rs2-2
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-minio-service-57dd49b-5gbgp-minio.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-psmdb-client-7469665986-tw5t7-psmdb-client.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-cfg-0-mongod.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-cfg-0-cfg-sidecar-1.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-cfg-0-backup-agent.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-cfg-1-mongod.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-cfg-1-cfg-sidecar-1.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-cfg-1-backup-agent.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-cfg-2-mongod.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-cfg-2-cfg-sidecar-1.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-cfg-2-backup-agent.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-mongos-0-mongos.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-mongos-0-mongos-sidecar-1.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-mongos-1-mongos.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-mongos-1-mongos-sidecar-1.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-mongos-2-mongos.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-mongos-2-mongos-sidecar-1.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs0-0-mongod.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs0-0-backup-agent.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs0-1-mongod.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs0-1-backup-agent.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs0-2-mongod.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs0-2-backup-agent.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs1-0-mongod.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs1-0-backup-agent.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs1-1-mongod.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs1-1-backup-agent.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs1-2-mongod.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs1-2-backup-agent.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs2-0-mongod.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs2-0-rs-sidecar-1.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs2-0-backup-agent.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs2-1-mongod.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs2-1-rs-sidecar-1.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs2-1-backup-agent.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs2-2-mongod.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs2-2-rs-sidecar-1.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-some-name-rs2-2-backup-agent.txt
logs saved in: /tmp/tmp.9ebN8bx5Bl/logs_output-percona-server-mongodb-operator-8d99b8f59-jmdfw-percona-server-mongodb-operator.txt
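The leak check first dumps the base64 credential values from the cluster secrets (the secrets= line), decodes them (the passwords= line carries both forms), saves every container log, and then greps each saved log for each credential. A condensed sketch of that logic (the jq filter and helper name are assumptions; the log directory follows the trace):

    check_passwords_leak() {
        local secrets passwords log pass
        # collect every credential value stored in the cluster secrets
        secrets=$(kubectl get secrets -o json \
            | jq -r '.items[].data | select(. != null) | to_entries[] | .value')
        # search for both the decoded and the base64 form of each credential
        passwords="$(for p in $secrets; do echo "$p" | base64 -d; echo; done) $secrets"
        for log in /tmp/tmp.9ebN8bx5Bl/logs_output-*.txt; do
            for pass in $passwords; do
                if grep -F -- "$pass" "$log" >/dev/null; then
                    echo "password leak found in $log"
                    exit 1
                fi
            done
        done
    }

Checking the base64 form as well catches the common mistake of logging secret manifests verbatim.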
runtimeclass.node.k8s.io "container-rc" deleted
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
get and delete old CRDs and RBAC
-----------------------------------------------------------------------------------
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbbackups.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbrestores.psmdb.percona.com" deleted
customresourcedefinition.apiextensions.k8s.io "perconaservermongodbs.psmdb.percona.com" deleted
error: the server doesn't have a resource type "perconaservermongodbbackups"
+ kubectl patch perconaservermongodbbackups.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbbackups"
E0610 14:27:16.952077   25319 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1: the server could not find the requested resource
E0610 14:27:16.986495   25319 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1-12-0: the server could not find the requested resource
E0610 14:27:16.987254   25319 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1-10-0: the server could not find the requested resource
E0610 14:27:16.987680   25319 memcache.go:287] couldn't get resource list for psmdb.percona.com/v1-11-0: the server could not find the requested resource
error: the server doesn't have a resource type "perconaservermongodbrestores"
+ kubectl patch perconaservermongodbrestores.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbrestores"
error: the server doesn't have a resource type "perconaservermongodbs"
+ kubectl patch perconaservermongodbs.psmdb.percona.com -n sh --type=merge -p '{"metadata":{"finalizers":[]}}'
error: the server doesn't have a resource type "perconaservermongodbs"
clusterrole.rbac.authorization.k8s.io "percona-server-mongodb-operator" deleted
clusterrolebinding.rbac.authorization.k8s.io "service-account-percona-server-mongodb-operator" deleted
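The patch calls above use the standard trick for unsticking custom resources during teardown: clearing the finalizer list so deletion cannot hang waiting on an operator that no longer exists. The "server doesn't have a resource type" errors are expected here, since the CRDs were deleted first and the patches are best-effort. The general pattern, sketched for the three resource types in this trace (the per-namespace iteration is an assumption):

    for crd in perconaservermongodbbackups perconaservermongodbrestores perconaservermongodbs; do
        # list namespace/name pairs for any leftover objects of this type
        kubectl get "${crd}.psmdb.percona.com" --all-namespaces \
            -o jsonpath='{range .items[*]}{.metadata.namespace} {.metadata.name}{"\n"}{end}' \
            | while read -r namespace name; do
                # empty the finalizer list so the API server can garbage-collect
                kubectl patch "${crd}.psmdb.percona.com" "$name" -n "$namespace" \
                    --type=merge -p '{"metadata":{"finalizers":[]}}'
            done || true    # the resource type may already be gone, as it is here
    done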