=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
I0206 08:45:19.585628 12363 request.go:682] Waited for 1.032157835s due to client-side throttling, not priority and fairness, request: GET:https://34.123.138.24/apis/nodemanagement.gke.io/v1alpha1?timeout=32s
harness.go:275: Successful connection to cluster at: https://34.123.138.24
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 180 seconds for each step
harness.go:372: testsuite: e2e-tests/tests has 30 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/demand-backup
=== PAUSE kuttl/harness/demand-backup
=== CONT kuttl/harness/demand-backup
logger.go:42: 08:45:25 | demand-backup | Creating namespace: kuttl-test-awake-shepherd
logger.go:42: 08:45:26 | demand-backup/0-minio-secret | starting test step 0-minio-secret
logger.go:42: 08:45:29 | demand-backup/0-minio-secret | Secret:kuttl-test-awake-shepherd/minio-secret created
logger.go:42: 08:45:29 | demand-backup/0-minio-secret | test step completed 0-minio-secret
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | starting test step 1-deploy-operator
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
init_temp_dir # do this only in the first TestStep
kubectl -n "${NAMESPACE}" apply -f "${TESTS_CONFIG_DIR}/cloud-secret.yml"
deploy_operator
deploy_non_tls_cluster_secrets
deploy_tls_cluster_secrets
deploy_client
deploy_minio]
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | + source ../../functions
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ realpath ../../..
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | ++++ pwd
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/tests/demand-backup
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | ++ test_name=demand-backup
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/vars.sh
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | ++++ git rev-parse --abbrev-ref HEAD
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export GIT_BRANCH=PR-523
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ GIT_BRANCH=PR-523
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export VERSION=PR-523-f00253e
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ VERSION=PR-523-f00253e
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export PMM_SERVER_VERSION=9.9.9
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ PMM_SERVER_VERSION=9.9.9
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | ++++ which gdate
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-523/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | ++++ which date
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ date=/usr/bin/date
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ command -v oc
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ kubectl get nodes
logger.go:42: 08:45:29 | demand-backup/1-deploy-operator | +++ grep '^minikube'
logger.go:42: 08:45:30 | demand-backup/1-deploy-operator | E0206 08:45:30.466107 12499 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:30 | demand-backup/1-deploy-operator | E0206 08:45:30.675543 12499 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:30 | demand-backup/1-deploy-operator | E0206 08:45:30.781748 12499 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:30 | demand-backup/1-deploy-operator | E0206 08:45:30.886843 12499 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:31 | demand-backup/1-deploy-operator | + init_temp_dir
logger.go:42: 08:45:31 | demand-backup/1-deploy-operator | + rm -rf /tmp/kuttl/ps/demand-backup
logger.go:42: 08:45:31 | demand-backup/1-deploy-operator | + mkdir -p /tmp/kuttl/ps/demand-backup
logger.go:42: 08:45:31 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-awake-shepherd apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf/cloud-secret.yml
logger.go:42: 08:45:32 | demand-backup/1-deploy-operator | E0206 08:45:32.462136 12549 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:32 | demand-backup/1-deploy-operator | E0206 08:45:32.567860 12549 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:32 | demand-backup/1-deploy-operator | secret/aws-s3-secret created
logger.go:42: 08:45:33 | demand-backup/1-deploy-operator | secret/gcp-cs-secret created
logger.go:42: 08:45:33 | demand-backup/1-deploy-operator | secret/azure-secret created
logger.go:42: 08:45:33 | demand-backup/1-deploy-operator | + deploy_operator
logger.go:42: 08:45:33 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-awake-shepherd apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy/crd.yaml
logger.go:42: 08:45:34 | demand-backup/1-deploy-operator | E0206 08:45:34.745700 12590 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:34 | demand-backup/1-deploy-operator | E0206 08:45:34.959668 12590 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:35 | demand-backup/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlbackups.ps.percona.com serverside-applied
logger.go:42: 08:45:35 | demand-backup/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlrestores.ps.percona.com serverside-applied
logger.go:42: 08:45:36 | demand-backup/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqls.ps.percona.com serverside-applied
logger.go:42: 08:45:36 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-awake-shepherd apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy/rbac.yaml
logger.go:42: 08:45:37 | demand-backup/1-deploy-operator | E0206 08:45:37.254592 12621 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:37 | demand-backup/1-deploy-operator | E0206 08:45:37.479532 12621 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:38 | demand-backup/1-deploy-operator | serviceaccount/percona-server-mysql-operator created
logger.go:42: 08:45:38 | demand-backup/1-deploy-operator | serviceaccount/percona-server-mysql-operator-orchestrator created
logger.go:42: 08:45:38 | demand-backup/1-deploy-operator | role.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created
logger.go:42: 08:45:39 | demand-backup/1-deploy-operator | role.rbac.authorization.k8s.io/percona-server-mysql-operator created
logger.go:42: 08:45:39 | demand-backup/1-deploy-operator | role.rbac.authorization.k8s.io/percona-server-mysql-operator-orchestrator created
logger.go:42: 08:45:39 | demand-backup/1-deploy-operator | rolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created
logger.go:42: 08:45:40 | demand-backup/1-deploy-operator | rolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator created
logger.go:42: 08:45:40 | demand-backup/1-deploy-operator | rolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator-orchestrator created
logger.go:42: 08:45:40 | demand-backup/1-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="DISABLE_TELEMETRY").value) = "true"'
logger.go:42: 08:45:40 | demand-backup/1-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="LOG_LEVEL").value) = "DEBUG"'
logger.go:42: 08:45:40 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-awake-shepherd apply -f -
logger.go:42: 08:45:40 | demand-backup/1-deploy-operator | ++ printf 'select(documentIndex==1).spec.template.spec.containers[0].image="%s"' perconalab/percona-server-mysql-operator:PR-523-f00253e
logger.go:42: 08:45:40 | demand-backup/1-deploy-operator | + yq eval 'select(documentIndex==1).spec.template.spec.containers[0].image="perconalab/percona-server-mysql-operator:PR-523-f00253e"' /mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy/operator.yaml
logger.go:42: 08:45:41 | demand-backup/1-deploy-operator | E0206 08:45:41.405368 12682 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:41 | demand-backup/1-deploy-operator | E0206 08:45:41.622747 12682 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:42 | demand-backup/1-deploy-operator | configmap/percona-server-mysql-operator-config created
logger.go:42: 08:45:42 | demand-backup/1-deploy-operator | deployment.apps/percona-server-mysql-operator created
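[editor's note] The operator is deployed by streaming deploy/operator.yaml through yq to pin the manager image and adjust its environment before kubectl applies the result. A minimal standalone sketch of the same pipeline, using the image tag and paths from this run; xtrace interleaves the stages, so this particular composition order is an assumption:

    # Sketch: override the manager image and env in operator.yaml, then apply it.
    yq eval 'select(documentIndex==1).spec.template.spec.containers[0].image="perconalab/percona-server-mysql-operator:PR-523-f00253e"' \
        /mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy/operator.yaml \
      | yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="DISABLE_TELEMETRY").value) = "true"' \
      | yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="LOG_LEVEL").value) = "DEBUG"' \
      | kubectl -n kuttl-test-awake-shepherd apply -f -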
logger.go:42: 08:45:42 | demand-backup/1-deploy-operator | + deploy_non_tls_cluster_secrets
logger.go:42: 08:45:42 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-awake-shepherd apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf/secrets.yaml
logger.go:42: 08:45:43 | demand-backup/1-deploy-operator | E0206 08:45:43.854550 12731 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:44 | demand-backup/1-deploy-operator | E0206 08:45:44.065368 12731 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:44 | demand-backup/1-deploy-operator | secret/test-secrets created
logger.go:42: 08:45:44 | demand-backup/1-deploy-operator | + deploy_tls_cluster_secrets
logger.go:42: 08:45:44 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-awake-shepherd apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf/ssl-secret.yaml
logger.go:42: 08:45:45 | demand-backup/1-deploy-operator | E0206 08:45:45.328413 12754 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:45 | demand-backup/1-deploy-operator | E0206 08:45:45.438161 12754 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:45 | demand-backup/1-deploy-operator | secret/test-ssl created
logger.go:42: 08:45:45 | demand-backup/1-deploy-operator | + deploy_client
logger.go:42: 08:45:45 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-awake-shepherd apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf/client.yaml
logger.go:42: 08:45:46 | demand-backup/1-deploy-operator | E0206 08:45:46.594417 12783 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:46 | demand-backup/1-deploy-operator | E0206 08:45:46.806521 12783 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:47 | demand-backup/1-deploy-operator | pod/mysql-client created
logger.go:42: 08:45:47 | demand-backup/1-deploy-operator | + deploy_minio
logger.go:42: 08:45:47 | demand-backup/1-deploy-operator | + local access_key
logger.go:42: 08:45:47 | demand-backup/1-deploy-operator | + local secret_key
logger.go:42: 08:45:47 | demand-backup/1-deploy-operator | ++ kubectl -n kuttl-test-awake-shepherd get secret minio-secret -o 'jsonpath={.data.AWS_ACCESS_KEY_ID}'
logger.go:42: 08:45:47 | demand-backup/1-deploy-operator | ++ base64 -d
logger.go:42: 08:45:47 | demand-backup/1-deploy-operator | E0206 08:45:47.717426 12805 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:47 | demand-backup/1-deploy-operator | E0206 08:45:47.929904 12805 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:48 | demand-backup/1-deploy-operator | E0206 08:45:48.036520 12805 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:48 | demand-backup/1-deploy-operator | E0206 08:45:48.143168 12805 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:48 | demand-backup/1-deploy-operator | + access_key='some-access$\n"-key'
logger.go:42: 08:45:48 | demand-backup/1-deploy-operator | ++ kubectl -n kuttl-test-awake-shepherd get secret minio-secret -o 'jsonpath={.data.AWS_SECRET_ACCESS_KEY}'
logger.go:42: 08:45:48 | demand-backup/1-deploy-operator | ++ base64 -d
logger.go:42: 08:45:48 | demand-backup/1-deploy-operator | E0206 08:45:48.829031 12829 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | E0206 08:45:49.140961 12829 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | E0206 08:45:49.247198 12829 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | E0206 08:45:49.353081 12829 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | + secret_key='some-$\n"secret-key'
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | + helm uninstall -n kuttl-test-awake-shepherd minio-service
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-523/kubeconfig
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-523/kubeconfig
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | Error: uninstall: Release not loaded: minio-service: release: not found
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | + :
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | + helm repo remove minio
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-523/kubeconfig
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-523/kubeconfig
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | Error: no repositories configured
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | + :
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | + helm repo add minio https://charts.min.io/
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-523/kubeconfig
logger.go:42: 08:45:49 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-523/kubeconfig
logger.go:42: 08:45:50 | demand-backup/1-deploy-operator | "minio" has been added to your repositories
logger.go:42: 08:45:50 | demand-backup/1-deploy-operator | +++ printf %q 'some-access$\n"-key'
logger.go:42: 08:45:50 | demand-backup/1-deploy-operator | ++ printf %q 'some-access\$\\n\"-key'
logger.go:42: 08:45:50 | demand-backup/1-deploy-operator | +++ printf %q 'some-$\n"secret-key'
logger.go:42: 08:45:50 | demand-backup/1-deploy-operator | ++ printf %q 'some-\$\\n\"secret-key'
logger.go:42: 08:45:50 | demand-backup/1-deploy-operator | + retry 10 60 helm install minio-service -n kuttl-test-awake-shepherd --version 5.0.14 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access\\\$\\\\n\\\"-key' --set 'users[0].secretKey=some-\\\$\\\\n\\\"secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio
logger.go:42: 08:45:50 | demand-backup/1-deploy-operator | + local max=10
logger.go:42: 08:45:50 | demand-backup/1-deploy-operator | + local delay=60
logger.go:42: 08:45:50 | demand-backup/1-deploy-operator | + shift 2
logger.go:42: 08:45:50 | demand-backup/1-deploy-operator | + local n=1
logger.go:42: 08:45:50 | demand-backup/1-deploy-operator | + helm install minio-service -n kuttl-test-awake-shepherd --version 5.0.14 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access\\\$\\\\n\\\"-key' --set 'users[0].secretKey=some-\\\$\\\\n\\\"secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio
logger.go:42: 08:45:50 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-523/kubeconfig
logger.go:42: 08:45:50 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-523/kubeconfig
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | NAME: minio-service
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | LAST DEPLOYED: Tue Feb 6 08:45:51 2024
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | NAMESPACE: kuttl-test-awake-shepherd
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | STATUS: deployed
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | REVISION: 1
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | TEST SUITE: None
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | NOTES:
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | MinIO can be accessed via port 9000 on the following DNS name from within your cluster:
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | minio-service.kuttl-test-awake-shepherd.svc.cluster.local
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator |
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | To access MinIO from localhost, run the below commands:
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator |
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | 1. export POD_NAME=$(kubectl get pods --namespace kuttl-test-awake-shepherd -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}")
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator |
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | 2. kubectl port-forward $POD_NAME 9000 --namespace kuttl-test-awake-shepherd
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator |
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator |
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client:
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator |
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator |
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace kuttl-test-awake-shepherd minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace kuttl-test-awake-shepherd minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator |
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | 3. mc ls minio-service-local
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | ++ kubectl -n kuttl-test-awake-shepherd get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | + MINIO_POD=minio-service-85cdcd4d44-jfmbv
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | + wait_pod minio-service-85cdcd4d44-jfmbv
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | + local pod=minio-service-85cdcd4d44-jfmbv
logger.go:42: 08:46:40 | demand-backup/1-deploy-operator | + set +o xtrace
logger.go:42: 08:46:41 | demand-backup/1-deploy-operator | minio-service-85cdcd4d44-jfmbvtrue
logger.go:42: 08:46:41 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-awake-shepherd run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID='\''some-access$\n"-key'\'' AWS_SECRET_ACCESS_KEY='\''some-$\n"secret-key'\'' AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'
logger.go:42: 08:46:46 | demand-backup/1-deploy-operator | If you don't see a command prompt, try pressing enter.
logger.go:42: 08:46:46 | demand-backup/1-deploy-operator | warning: couldn't attach to pod/aws-cli, falling back to streaming logs: unable to upgrade connection: container aws-cli not found in pod aws-cli_kuttl-test-awake-shepherd
logger.go:42: 08:46:46 | demand-backup/1-deploy-operator | make_bucket: operator-testing
logger.go:42: 08:46:48 | demand-backup/1-deploy-operator | pod "aws-cli" deleted
logger.go:42: 08:46:50 | demand-backup/1-deploy-operator | test step completed 1-deploy-operator
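[editor's note] deploy_minio, as traced above, reads the MinIO credentials back out of the minio-secret created in step 0, installs the minio/minio chart, and creates the operator-testing bucket from a throwaway aws-cli pod. A condensed sketch of the same flow; the full invocation in the trace also sets rootUser/rootPassword, resource requests, persistence size, and disables the chart's securityContext, and the printf %q re-escaping of the keys is elided here:

    # Sketch of deploy_minio, condensed from the trace above.
    namespace=kuttl-test-awake-shepherd
    access_key="$(kubectl -n "$namespace" get secret minio-secret -o 'jsonpath={.data.AWS_ACCESS_KEY_ID}' | base64 -d)"
    secret_key="$(kubectl -n "$namespace" get secret minio-secret -o 'jsonpath={.data.AWS_SECRET_ACCESS_KEY}' | base64 -d)"
    helm repo add minio https://charts.min.io/
    helm install minio-service -n "$namespace" --version 5.0.14 \
        --set replicas=1 --set mode=standalone --set service.type=ClusterIP \
        --set "users[0].accessKey=$access_key" --set "users[0].secretKey=$secret_key" \
        --set 'users[0].policy=consoleAdmin' minio/minio
    # Create the test bucket through a one-shot pod inside the cluster.
    kubectl -n "$namespace" run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c \
        "AWS_ACCESS_KEY_ID='$access_key' AWS_SECRET_ACCESS_KEY='$secret_key' AWS_DEFAULT_REGION=us-east-1 \
        /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing"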
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | starting test step 2-create-cluster
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
get_cr \
| yq eval '.spec.mysql.clusterType="async"' - \
| yq eval '.spec.mysql.size=3' - \
| yq eval '.spec.proxy.haproxy.enabled=true' - \
| yq eval '.spec.proxy.haproxy.size=3' - \
| yq eval '.spec.orchestrator.enabled=true' - \
| yq eval '.spec.orchestrator.size=3' - \
| yq eval '.spec.backup.storages.minio.type="s3"' - \
| yq eval '.spec.backup.storages.minio.s3.bucket="operator-testing"' - \
| yq eval '.spec.backup.storages.minio.s3.credentialsSecret="minio-secret"' - \
| yq eval '.spec.backup.storages.minio.s3.endpointUrl="http://minio-service:9000"' - \
| yq eval '.spec.backup.storages.minio.s3.region="us-east-1"' - \
| yq eval '.spec.backup.storages.aws-s3.type="s3"' - \
| yq eval '.spec.backup.storages.aws-s3.verifyTLS=true' - \
| yq eval '.spec.backup.storages.aws-s3.s3.bucket="operator-testing"' - \
| yq eval '.spec.backup.storages.aws-s3.s3.credentialsSecret="aws-s3-secret"' - \
| yq eval '.spec.backup.storages.aws-s3.s3.region="us-east-1"' - \
| yq eval '.spec.backup.storages.aws-s3.s3.prefix="ps"' - \
| yq eval '.spec.backup.storages.gcp-cs.type="gcs"' - \
| yq eval '.spec.backup.storages.gcp-cs.verifyTLS=true' - \
| yq eval '.spec.backup.storages.gcp-cs.gcs.bucket="operator-testing"' - \
| yq eval '.spec.backup.storages.gcp-cs.gcs.credentialsSecret="gcp-cs-secret"' - \
| yq eval '.spec.backup.storages.gcp-cs.gcs.endpointUrl="https://storage.googleapis.com"' - \
| yq eval '.spec.backup.storages.gcp-cs.gcs.prefix="ps"' - \
| yq eval '.spec.backup.storages.azure-blob.type="azure"' - \
| yq eval '.spec.backup.storages.azure-blob.verifyTLS=true' - \
| yq eval '.spec.backup.storages.azure-blob.azure.containerName="operator-testing"' - \
| yq eval '.spec.backup.storages.azure-blob.azure.credentialsSecret="azure-secret"' - \
| yq eval '.spec.backup.storages.azure-blob.azure.prefix="ps"' - \
| kubectl -n "${NAMESPACE}" apply -f -]
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + source ../../functions
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | +++ realpath ../../..
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | ++++ pwd
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/tests/demand-backup
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | ++ test_name=demand-backup
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/vars.sh
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.mysql.clusterType="async"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval .spec.mysql.size=3 -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval .spec.proxy.haproxy.size=3 -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval .spec.orchestrator.enabled=true -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.credentialsSecret="minio-secret"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.endpointUrl="http://minio-service:9000"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + get_cr
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + local name_suffix=
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.prefix="ps"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.prefix="ps"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + kubectl -n kuttl-test-awake-shepherd apply -f -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.type="azure"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.region="us-east-1"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.type="s3"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval .spec.backup.storages.aws-s3.verifyTLS=true -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.bucket="operator-testing"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.credentialsSecret="aws-s3-secret"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval .spec.proxy.haproxy.enabled=true -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval .spec.orchestrator.size=3 -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval .spec.backup.storages.azure-blob.verifyTLS=true -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.type="s3"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.bucket="operator-testing"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + '[' -n '' ']'
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.containerName="operator-testing"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.region="us-east-1"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.prefix="ps"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.type="gcs"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:dev-latest
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.pmm.image="perconalab/pmm-client:dev-latest"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval .spec.backup.storages.gcp-cs.verifyTLS=true -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.credentialsSecret="azure-secret"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.bucket="operator-testing"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.credentialsSecret="gcp-cs-secret"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.endpointUrl="https://storage.googleapis.com"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | ++ printf '.spec.proxy.haproxy.image="%s"' perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.sslSecretName="test-ssl"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.upgradeOptions.apply="disabled"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.mysql.clusterType="async"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.secretsName="test-secrets"' -
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | ++ printf '.metadata.name="%s"' demand-backup
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.metadata.name="demand-backup"' /mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy/cr.yaml
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | ++ printf '.spec.initImage="%s"' perconalab/percona-server-mysql-operator:PR-523-f00253e
logger.go:42: 08:46:50 | demand-backup/2-create-cluster | + yq eval '.spec.initImage="perconalab/percona-server-mysql-operator:PR-523-f00253e"' -
logger.go:42: 08:46:52 | demand-backup/2-create-cluster | perconaservermysql.ps.percona.com/demand-backup created
logger.go:42: 08:51:45 | demand-backup/2-create-cluster | test step completed 2-create-cluster
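[editor's note] Step 2 builds the PerconaServerMySQL custom resource by piping deploy/cr.yaml through the chain of yq edits quoted in its running command and applying the result; the xtrace lines above show each stage of that pipeline executing in parallel. Reduced to a few representative fields, the pattern is (a sketch, not the full chain):

    # Sketch: customize the sample CR and apply it to the test namespace.
    yq eval '.metadata.name="demand-backup"' "$DEPLOY_DIR/cr.yaml" \
      | yq eval '.spec.mysql.clusterType="async"' - \
      | yq eval '.spec.mysql.size=3' - \
      | yq eval '.spec.backup.storages.minio.type="s3"' - \
      | yq eval '.spec.backup.storages.minio.s3.endpointUrl="http://minio-service:9000"' - \
      | kubectl -n "$NAMESPACE" apply -f -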
logger.go:42: 08:51:45 | demand-backup/3-write-data | starting test step 3-write-data
logger.go:42: 08:51:45 | demand-backup/3-write-data | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
run_mysql \
"CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" \
"-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"
run_mysql \
"INSERT myDB.myTable (id) VALUES (100500)" \
"-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"]
logger.go:42: 08:51:45 | demand-backup/3-write-data | + source ../../functions
logger.go:42: 08:51:45 | demand-backup/3-write-data | +++ realpath ../../..
logger.go:42: 08:51:45 | demand-backup/3-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523
logger.go:42: 08:51:45 | demand-backup/3-write-data | ++++ pwd
logger.go:42: 08:51:45 | demand-backup/3-write-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/tests/demand-backup
logger.go:42: 08:51:45 | demand-backup/3-write-data | ++ test_name=demand-backup
logger.go:42: 08:51:45 | demand-backup/3-write-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/vars.sh
logger.go:42: 08:51:45 | demand-backup/3-write-data | +++ get_cluster_name
logger.go:42: 08:51:45 | demand-backup/3-write-data | +++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 08:51:46 | demand-backup/3-write-data | ++ get_haproxy_svc demand-backup
logger.go:42: 08:51:46 | demand-backup/3-write-data | ++ local cluster=demand-backup
logger.go:42: 08:51:46 | demand-backup/3-write-data | ++ echo demand-backup-haproxy
logger.go:42: 08:51:46 | demand-backup/3-write-data | + run_mysql 'CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)' '-h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 08:51:46 | demand-backup/3-write-data | + local 'command=CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)'
logger.go:42: 08:51:46 | demand-backup/3-write-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 08:51:46 | demand-backup/3-write-data | + local pod=
logger.go:42: 08:51:46 | demand-backup/3-write-data | ++ get_client_pod
logger.go:42: 08:51:46 | demand-backup/3-write-data | ++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 08:51:46 | demand-backup/3-write-data | + client_pod=mysql-client
logger.go:42: 08:51:46 | demand-backup/3-write-data | + wait_pod mysql-client
logger.go:42: 08:51:46 | demand-backup/3-write-data | + local pod=mysql-client
logger.go:42: 08:51:46 | demand-backup/3-write-data | + set +o xtrace
logger.go:42: 08:51:46 | demand-backup/3-write-data | mysql-clienttrue
logger.go:42: 08:51:46 | demand-backup/3-write-data | + kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" | mysql -sN -h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 08:51:46 | demand-backup/3-write-data | + sed -e 's/mysql: //'
logger.go:42: 08:51:46 | demand-backup/3-write-data | + grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 08:51:47 | demand-backup/3-write-data | + :
logger.go:42: 08:51:47 | demand-backup/3-write-data | +++ get_cluster_name
logger.go:42: 08:51:47 | demand-backup/3-write-data | +++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 08:51:48 | demand-backup/3-write-data | ++ get_haproxy_svc demand-backup
logger.go:42: 08:51:48 | demand-backup/3-write-data | ++ local cluster=demand-backup
logger.go:42: 08:51:48 | demand-backup/3-write-data | ++ echo demand-backup-haproxy
logger.go:42: 08:51:48 | demand-backup/3-write-data | + run_mysql 'INSERT myDB.myTable (id) VALUES (100500)' '-h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 08:51:48 | demand-backup/3-write-data | + local 'command=INSERT myDB.myTable (id) VALUES (100500)'
logger.go:42: 08:51:48 | demand-backup/3-write-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 08:51:48 | demand-backup/3-write-data | + local pod=
logger.go:42: 08:51:48 | demand-backup/3-write-data | ++ get_client_pod
logger.go:42: 08:51:48 | demand-backup/3-write-data | ++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 08:51:48 | demand-backup/3-write-data | + client_pod=mysql-client
logger.go:42: 08:51:48 | demand-backup/3-write-data | + wait_pod mysql-client
logger.go:42: 08:51:48 | demand-backup/3-write-data | + local pod=mysql-client
logger.go:42: 08:51:48 | demand-backup/3-write-data | + set +o xtrace
logger.go:42: 08:51:49 | demand-backup/3-write-data | mysql-clienttrue
logger.go:42: 08:51:49 | demand-backup/3-write-data | + sed -e 's/mysql: //'
logger.go:42: 08:51:49 | demand-backup/3-write-data | + grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 08:51:49 | demand-backup/3-write-data | + kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "INSERT myDB.myTable (id) VALUES (100500)" | mysql -sN -h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 08:51:50 | demand-backup/3-write-data | + :
logger.go:42: 08:51:51 | demand-backup/3-write-data | test step completed 3-write-data
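[editor's note] run_mysql, as reconstructed from the trace above, resolves the long-lived mysql-client pod and pipes the statement into mysql through the HAProxy service, stripping the password warning from the output. A sketch under that assumption; wait_pod, which blocks until the client pod is Ready, is elided:

    # Sketch of run_mysql as traced; not the verbatim helper from e2e-tests/functions.
    run_mysql() {
        local command="$1"   # SQL to execute
        local uri="$2"       # mysql client connection flags
        local client_pod
        client_pod="$(kubectl -n "$NAMESPACE" get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}')"
        kubectl -n "$NAMESPACE" exec "$client_pod" -- bash -c "printf '%s\n' \"$command\" | mysql -sN $uri" \
            | sed -e 's/mysql: //' \
            | grep -v 'Using a password on the command line interface can be insecure.'
    }

    # Usage, matching the step above:
    run_mysql "INSERT myDB.myTable (id) VALUES (100500)" "-h demand-backup-haproxy -uroot -proot_password"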
logger.go:42: 08:51:51 | demand-backup/4-move-primary-before-backup | ++ get_primary_from_label
logger.go:42: 08:51:51 | demand-backup/4-move-primary-before-backup | ++ kubectl -n kuttl-test-awake-shepherd get pods -l mysql.percona.com/primary=true '-ojsonpath={.items[0].metadata.name}'
logger.go:42: 08:51:51 | demand-backup/4-move-primary-before-backup | + primary_pod_from_label=demand-backup-mysql-0
logger.go:42: 08:51:51 | demand-backup/4-move-primary-before-backup | + kubectl delete pod -n kuttl-test-awake-shepherd demand-backup-mysql-0
logger.go:42: 08:51:52 | demand-backup/4-move-primary-before-backup | pod "demand-backup-mysql-0" deleted
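The wait_cluster_consistency_async call recorded next is what turns this pod deletion into a deterministic test: it blocks until the PerconaServerMySQL resource reports a fully ready async cluster again. A sketch that compresses the five status checks visible in the trace below into one loop condition (not the verbatim e2e-tests/functions implementation):

    # block until the ps resource reports cluster_size mysql and orc_size orchestrator
    # members, with every state field back to "ready"
    wait_cluster_consistency_async() {
        local cluster_name="$1" cluster_size="$2" orc_size="$3"
        sleep 7   # give the operator a moment to observe the deleted pod
        until [[ "$(kubectl get ps "${cluster_name}" -n "${NAMESPACE}" -o 'jsonpath={.status.mysql.state}')" == "ready" &&
                 "$(kubectl get ps "${cluster_name}" -n "${NAMESPACE}" -o 'jsonpath={.status.mysql.ready}')" == "${cluster_size}" &&
                 "$(kubectl get ps "${cluster_name}" -n "${NAMESPACE}" -o 'jsonpath={.status.orchestrator.ready}')" == "${orc_size}" &&
                 "$(kubectl get ps "${cluster_name}" -n "${NAMESPACE}" -o 'jsonpath={.status.orchestrator.state}')" == "ready" &&
                 "$(kubectl get ps "${cluster_name}" -n "${NAMESPACE}" -o 'jsonpath={.status.state}')" == "ready" ]]; do
            echo 'waiting for cluster readiness (async)'
            sleep 15
        done
    }

In the run below the cluster needs roughly a hundred seconds (08:52:12 to 08:53:52) to report ready again after the primary is killed.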
logger.go:42: 08:52:12 | demand-backup/4-move-primary-before-backup | + wait_cluster_consistency_async demand-backup 3 3
logger.go:42: 08:52:12 | demand-backup/4-move-primary-before-backup | + local cluster_name=demand-backup
logger.go:42: 08:52:12 | demand-backup/4-move-primary-before-backup | + local cluster_size=3
logger.go:42: 08:52:12 | demand-backup/4-move-primary-before-backup | + local orc_size=3
logger.go:42: 08:52:12 | demand-backup/4-move-primary-before-backup | + '[' -z 3 ']'
logger.go:42: 08:52:12 | demand-backup/4-move-primary-before-backup | + sleep 7
logger.go:42: 08:52:19 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-awake-shepherd -o 'jsonpath={.status.mysql.state}'
logger.go:42: 08:52:20 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
logger.go:42: 08:52:20 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readiness (async)'
logger.go:42: 08:52:20 | demand-backup/4-move-primary-before-backup | waiting for cluster readiness (async)
logger.go:42: 08:52:20 | demand-backup/4-move-primary-before-backup | + sleep 15
logger.go:42: 08:52:35 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-awake-shepherd -o 'jsonpath={.status.mysql.state}'
logger.go:42: 08:52:35 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
logger.go:42: 08:52:35 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readiness (async)'
logger.go:42: 08:52:35 | demand-backup/4-move-primary-before-backup | waiting for cluster readiness (async)
logger.go:42: 08:52:35 | demand-backup/4-move-primary-before-backup | + sleep 15
logger.go:42: 08:52:50 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-awake-shepherd -o 'jsonpath={.status.mysql.state}'
logger.go:42: 08:52:51 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
logger.go:42: 08:52:51 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readiness (async)'
logger.go:42: 08:52:51 | demand-backup/4-move-primary-before-backup | waiting for cluster readiness (async)
logger.go:42: 08:52:51 | demand-backup/4-move-primary-before-backup | + sleep 15
logger.go:42: 08:53:06 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-awake-shepherd -o 'jsonpath={.status.mysql.state}'
logger.go:42: 08:53:06 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
logger.go:42: 08:53:06 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readiness (async)'
logger.go:42: 08:53:06 | demand-backup/4-move-primary-before-backup | waiting for cluster readiness (async)
logger.go:42: 08:53:06 | demand-backup/4-move-primary-before-backup | + sleep 15
logger.go:42: 08:53:21 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-awake-shepherd -o 'jsonpath={.status.mysql.state}'
logger.go:42: 08:53:22 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
logger.go:42: 08:53:22 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readiness (async)'
logger.go:42: 08:53:22 | demand-backup/4-move-primary-before-backup | waiting for cluster readiness (async)
logger.go:42: 08:53:22 | demand-backup/4-move-primary-before-backup | + sleep 15
logger.go:42: 08:53:37 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-awake-shepherd -o 'jsonpath={.status.mysql.state}'
logger.go:42: 08:53:37 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
logger.go:42: 08:53:37 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readiness (async)'
logger.go:42: 08:53:37 | demand-backup/4-move-primary-before-backup | waiting for cluster readiness (async)
logger.go:42: 08:53:37 | demand-backup/4-move-primary-before-backup | + sleep 15
logger.go:42: 08:53:52 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-awake-shepherd -o 'jsonpath={.status.mysql.state}'
logger.go:42: 08:53:52 | demand-backup/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]]
logger.go:42: 08:53:52 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-awake-shepherd -o 'jsonpath={.status.mysql.ready}'
logger.go:42: 08:53:53 | demand-backup/4-move-primary-before-backup | + [[ 3 == \3 ]]
logger.go:42: 08:53:53 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-awake-shepherd -o 'jsonpath={.status.orchestrator.ready}'
logger.go:42: 08:53:53 | demand-backup/4-move-primary-before-backup | + [[ 3 == \3 ]]
logger.go:42: 08:53:53 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-awake-shepherd -o 'jsonpath={.status.orchestrator.state}'
logger.go:42: 08:53:54 | demand-backup/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]]
logger.go:42: 08:53:54 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-awake-shepherd -o 'jsonpath={.status.state}'
logger.go:42: 08:53:54 | demand-backup/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]]
logger.go:42: 08:53:54 | demand-backup/4-move-primary-before-backup | ++ get_primary_from_label
logger.go:42: 08:53:54 | demand-backup/4-move-primary-before-backup | ++ kubectl -n kuttl-test-awake-shepherd get pods -l mysql.percona.com/primary=true '-ojsonpath={.items[0].metadata.name}'
logger.go:42: 08:53:55 | demand-backup/4-move-primary-before-backup | + new_primary_pod_from_label=demand-backup-mysql-2
logger.go:42: 08:53:55 | demand-backup/4-move-primary-before-backup | + '[' demand-backup-mysql-0 == demand-backup-mysql-2 ']'
logger.go:42: 08:53:56 | demand-backup/4-move-primary-before-backup | test step completed 4-move-primary-before-backup
logger.go:42: 08:53:56 | demand-backup/5-create-backup-minio | starting test step 5-create-backup-minio
logger.go:42: 08:53:57 | demand-backup/5-create-backup-minio | PerconaServerMySQLBackup:kuttl-test-awake-shepherd/demand-backup-minio created
logger.go:42: 08:54:08 | demand-backup/5-create-backup-minio | test step completed 5-create-backup-minio
logger.go:42: 08:54:08 | demand-backup/6-check-password-leak | starting test step 6-check-password-leak
logger.go:42: 08:54:08 | demand-backup/6-check-password-leak | running command: [sh -c set -o errexit set -o xtrace source ../../functions check_passwords_leak]
logger.go:42: 08:54:08 | demand-backup/6-check-password-leak | + source ../../functions
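The failover assertion that step 4 just completed rests entirely on the mysql.percona.com/primary pod label that the operator keeps up to date. Stripped of harness plumbing, the pattern is the following (variable names shortened here for clarity):

    # capture the current primary, kill it, and require that a different pod gets promoted
    old_primary=$(kubectl -n "${NAMESPACE}" get pods \
        -l mysql.percona.com/primary=true -o 'jsonpath={.items[0].metadata.name}')
    kubectl -n "${NAMESPACE}" delete pod "${old_primary}"   # force a failover
    wait_cluster_consistency_async demand-backup 3 3        # 3/3 mysql and 3/3 orchestrator ready
    new_primary=$(kubectl -n "${NAMESPACE}" get pods \
        -l mysql.percona.com/primary=true -o 'jsonpath={.items[0].metadata.name}')
    if [ "${old_primary}" == "${new_primary}" ]; then
        echo "failover did not happen" >&2
        exit 1
    fi

In this run orchestrator promoted demand-backup-mysql-2 after demand-backup-mysql-0 was deleted, so the check passes.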
logger.go:42: 08:54:09 | demand-backup/6-check-password-leak | + check_passwords_leak
logger.go:42: 08:54:09 | demand-backup/6-check-password-leak | + local secrets
logger.go:42: 08:54:09 | demand-backup/6-check-password-leak | + local passwords
logger.go:42: 08:54:09 | demand-backup/6-check-password-leak | + local pods
logger.go:42: 08:54:09 | demand-backup/6-check-password-leak | ++ kubectl get secrets -o json
logger.go:42: 08:54:09 | demand-backup/6-check-password-leak | ++ jq -r '.items[].data | to_entries | .[] | select(.key | (endswith(".crt") or endswith(".key") or endswith(".pub") or endswith(".pem") or endswith(".p12") or test("namespace")) | not) | .value'
logger.go:42: 08:54:09 | demand-backup/6-check-password-leak | + secrets=
logger.go:42: 08:54:09 | demand-backup/6-check-password-leak | + passwords=' '
logger.go:42: 08:54:09 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pods -o name
logger.go:42: 08:54:09 | demand-backup/6-check-password-leak | ++ awk -F / '{print $2}'
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | + pods='demand-backup-haproxy-0
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | demand-backup-haproxy-1
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | demand-backup-haproxy-2
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | demand-backup-mysql-0
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | demand-backup-mysql-1
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | demand-backup-mysql-2
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | demand-backup-orc-0
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | demand-backup-orc-1
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | demand-backup-orc-2
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | minio-service-85cdcd4d44-jfmbv
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | mysql-client
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | percona-server-mysql-operator-b4c599bbb-cn75v
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | xb-demand-backup-minio-minio-hzfzk'
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | + collect_logs kuttl-test-awake-shepherd
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | + local containers
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | + local count
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | + NS=kuttl-test-awake-shepherd
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | + for p in '$pods'
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-haproxy-0 -o 'jsonpath={.spec.containers[*].name}'
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | + containers='haproxy mysql-monit'
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:10 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-haproxy-0 -c haproxy
logger.go:42: 08:54:11 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-haproxy.txt
logger.go:42: 08:54:11 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-haproxy.txt
logger.go:42: 08:54:11 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:11 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-haproxy-0 -c mysql-monit
logger.go:42: 08:54:11 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-mysql-monit.txt
logger.go:42: 08:54:11 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-mysql-monit.txt
logger.go:42: 08:54:11 | demand-backup/6-check-password-leak | + echo
logger.go:42: 08:54:11 | demand-backup/6-check-password-leak |
logger.go:42: 08:54:11 | demand-backup/6-check-password-leak | + for p in '$pods'
logger.go:42: 08:54:11 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-haproxy-1 -o 'jsonpath={.spec.containers[*].name}'
logger.go:42: 08:54:12 | demand-backup/6-check-password-leak | + containers='haproxy mysql-monit'
logger.go:42: 08:54:12 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:12 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-haproxy-1 -c haproxy
logger.go:42: 08:54:12 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-haproxy.txt
logger.go:42: 08:54:12 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-haproxy.txt
logger.go:42: 08:54:12 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:12 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-haproxy-1 -c mysql-monit
logger.go:42: 08:54:13 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-mysql-monit.txt
logger.go:42: 08:54:13 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-mysql-monit.txt
logger.go:42: 08:54:13 | demand-backup/6-check-password-leak | + echo
logger.go:42: 08:54:13 | demand-backup/6-check-password-leak |
logger.go:42: 08:54:13 | demand-backup/6-check-password-leak | + for p in '$pods'
logger.go:42: 08:54:13 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-haproxy-2 -o 'jsonpath={.spec.containers[*].name}'
logger.go:42: 08:54:14 | demand-backup/6-check-password-leak | + containers='haproxy mysql-monit'
logger.go:42: 08:54:14 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:14 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-haproxy-2 -c haproxy
logger.go:42: 08:54:14 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-haproxy.txt
logger.go:42: 08:54:14 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-haproxy.txt
logger.go:42: 08:54:14 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:14 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-haproxy-2 -c mysql-monit
logger.go:42: 08:54:15 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-mysql-monit.txt
logger.go:42: 08:54:15 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-mysql-monit.txt
logger.go:42: 08:54:15 | demand-backup/6-check-password-leak | + echo
logger.go:42: 08:54:15 | demand-backup/6-check-password-leak |
logger.go:42: 08:54:15 | demand-backup/6-check-password-leak | + for p in '$pods'
logger.go:42: 08:54:15 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-mysql-0 -o 'jsonpath={.spec.containers[*].name}'
logger.go:42: 08:54:15 | demand-backup/6-check-password-leak | + containers='mysql xtrabackup pt-heartbeat'
logger.go:42: 08:54:15 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:15 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-0 -c mysql
logger.go:42: 08:54:16 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-mysql.txt
logger.go:42: 08:54:16 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-mysql.txt
logger.go:42: 08:54:16 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:16 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-0 -c xtrabackup
logger.go:42: 08:54:16 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-xtrabackup.txt
logger.go:42: 08:54:16 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-xtrabackup.txt
logger.go:42: 08:54:16 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:16 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-0 -c pt-heartbeat
logger.go:42: 08:54:17 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-pt-heartbeat.txt
logger.go:42: 08:54:17 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-pt-heartbeat.txt
logger.go:42: 08:54:17 | demand-backup/6-check-password-leak | + echo
logger.go:42: 08:54:17 | demand-backup/6-check-password-leak |
logger.go:42: 08:54:17 | demand-backup/6-check-password-leak | + for p in '$pods'
logger.go:42: 08:54:17 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-mysql-1 -o 'jsonpath={.spec.containers[*].name}'
logger.go:42: 08:54:18 | demand-backup/6-check-password-leak | + containers='mysql xtrabackup pt-heartbeat'
logger.go:42: 08:54:18 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:18 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-1 -c mysql
logger.go:42: 08:54:18 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-mysql.txt
logger.go:42: 08:54:18 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-mysql.txt
logger.go:42: 08:54:18 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:18 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-1 -c xtrabackup
logger.go:42: 08:54:19 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-xtrabackup.txt
logger.go:42: 08:54:19 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-xtrabackup.txt
logger.go:42: 08:54:19 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:19 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-1 -c pt-heartbeat
logger.go:42: 08:54:19 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-pt-heartbeat.txt
logger.go:42: 08:54:19 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-pt-heartbeat.txt
logger.go:42: 08:54:19 | demand-backup/6-check-password-leak | + echo
logger.go:42: 08:54:19 | demand-backup/6-check-password-leak |
logger.go:42: 08:54:19 | demand-backup/6-check-password-leak | + for p in '$pods'
logger.go:42: 08:54:19 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-mysql-2 -o 'jsonpath={.spec.containers[*].name}'
logger.go:42: 08:54:20 | demand-backup/6-check-password-leak | + containers='mysql xtrabackup pt-heartbeat'
logger.go:42: 08:54:20 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:20 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-2 -c mysql
logger.go:42: 08:54:20 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-mysql.txt
logger.go:42: 08:54:20 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-mysql.txt
logger.go:42: 08:54:20 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:20 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-2 -c xtrabackup
logger.go:42: 08:54:21 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-xtrabackup.txt
logger.go:42: 08:54:21 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-xtrabackup.txt
logger.go:42: 08:54:21 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:21 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-2 -c pt-heartbeat
logger.go:42: 08:54:22 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-pt-heartbeat.txt
logger.go:42: 08:54:22 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-pt-heartbeat.txt
logger.go:42: 08:54:22 | demand-backup/6-check-password-leak | + echo
logger.go:42: 08:54:22 | demand-backup/6-check-password-leak |
logger.go:42: 08:54:22 | demand-backup/6-check-password-leak | + for p in '$pods'
logger.go:42: 08:54:22 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-orc-0 -o 'jsonpath={.spec.containers[*].name}'
logger.go:42: 08:54:22 | demand-backup/6-check-password-leak | + containers='orc mysql-monit'
logger.go:42: 08:54:22 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:22 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-orc-0 -c orc
logger.go:42: 08:54:23 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-orc.txt
logger.go:42: 08:54:23 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-orc.txt
logger.go:42: 08:54:23 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:23 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-orc-0 -c mysql-monit
logger.go:42: 08:54:23 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-mysql-monit.txt
logger.go:42: 08:54:23 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-mysql-monit.txt
logger.go:42: 08:54:23 | demand-backup/6-check-password-leak | + echo
logger.go:42: 08:54:23 | demand-backup/6-check-password-leak |
logger.go:42: 08:54:23 | demand-backup/6-check-password-leak | + for p in '$pods'
logger.go:42: 08:54:23 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-orc-1 -o 'jsonpath={.spec.containers[*].name}'
logger.go:42: 08:54:24 | demand-backup/6-check-password-leak | + containers='orc mysql-monit'
logger.go:42: 08:54:24 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:24 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-orc-1 -c orc
logger.go:42: 08:54:25 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-orc.txt
logger.go:42: 08:54:25 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-orc.txt
logger.go:42: 08:54:25 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:25 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-orc-1 -c mysql-monit
logger.go:42: 08:54:25 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-mysql-monit.txt
logger.go:42: 08:54:25 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-mysql-monit.txt
logger.go:42: 08:54:25 | demand-backup/6-check-password-leak | + echo
logger.go:42: 08:54:25 | demand-backup/6-check-password-leak |
logger.go:42: 08:54:25 | demand-backup/6-check-password-leak | + for p in '$pods'
logger.go:42: 08:54:25 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-orc-2 -o 'jsonpath={.spec.containers[*].name}'
logger.go:42: 08:54:26 | demand-backup/6-check-password-leak | + containers='orc mysql-monit'
logger.go:42: 08:54:26 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:26 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-orc-2 -c orc
logger.go:42: 08:54:26 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-orc.txt
logger.go:42: 08:54:26 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-orc.txt
logger.go:42: 08:54:26 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:26 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-orc-2 -c mysql-monit
logger.go:42: 08:54:27 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-mysql-monit.txt
logger.go:42: 08:54:27 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-mysql-monit.txt
logger.go:42: 08:54:27 | demand-backup/6-check-password-leak | + echo
logger.go:42: 08:54:27 | demand-backup/6-check-password-leak |
logger.go:42: 08:54:27 | demand-backup/6-check-password-leak | + for p in '$pods'
logger.go:42: 08:54:27 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod minio-service-85cdcd4d44-jfmbv -o 'jsonpath={.spec.containers[*].name}'
logger.go:42: 08:54:27 | demand-backup/6-check-password-leak | + containers=minio
logger.go:42: 08:54:27 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:27 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs minio-service-85cdcd4d44-jfmbv -c minio
logger.go:42: 08:54:28 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-minio-service-85cdcd4d44-jfmbv-minio.txt
logger.go:42: 08:54:28 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-minio-service-85cdcd4d44-jfmbv-minio.txt
logger.go:42: 08:54:28 | demand-backup/6-check-password-leak | + echo
logger.go:42: 08:54:28 | demand-backup/6-check-password-leak |
logger.go:42: 08:54:28 | demand-backup/6-check-password-leak | + for p in '$pods'
logger.go:42: 08:54:28 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod mysql-client -o 'jsonpath={.spec.containers[*].name}'
logger.go:42: 08:54:28 | demand-backup/6-check-password-leak | + containers=mysql-client
logger.go:42: 08:54:28 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:28 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs mysql-client -c mysql-client
logger.go:42: 08:54:29 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-mysql-client-mysql-client.txt
logger.go:42: 08:54:29 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-mysql-client-mysql-client.txt
logger.go:42: 08:54:29 | demand-backup/6-check-password-leak | + echo
logger.go:42: 08:54:29 | demand-backup/6-check-password-leak |
logger.go:42: 08:54:29 | demand-backup/6-check-password-leak | + for p in '$pods'
logger.go:42: 08:54:29 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod percona-server-mysql-operator-b4c599bbb-cn75v -o 'jsonpath={.spec.containers[*].name}'
logger.go:42: 08:54:29 | demand-backup/6-check-password-leak | + containers=manager
logger.go:42: 08:54:29 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:29 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs percona-server-mysql-operator-b4c599bbb-cn75v -c manager
logger.go:42: 08:54:30 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-percona-server-mysql-operator-b4c599bbb-cn75v-manager.txt
logger.go:42: 08:54:30 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-percona-server-mysql-operator-b4c599bbb-cn75v-manager.txt
logger.go:42: 08:54:30 | demand-backup/6-check-password-leak | + echo
logger.go:42: 08:54:30 | demand-backup/6-check-password-leak |
logger.go:42: 08:54:30 | demand-backup/6-check-password-leak | + for p in '$pods'
logger.go:42: 08:54:30 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod xb-demand-backup-minio-minio-hzfzk -o 'jsonpath={.spec.containers[*].name}'
logger.go:42: 08:54:30 | demand-backup/6-check-password-leak | + containers=xtrabackup
logger.go:42: 08:54:30 | demand-backup/6-check-password-leak | + for c in '$containers'
logger.go:42: 08:54:30 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs xb-demand-backup-minio-minio-hzfzk -c xtrabackup
logger.go:42: 08:54:31 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-demand-backup-minio-minio-hzfzk-xtrabackup.txt
logger.go:42: 08:54:31 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-demand-backup-minio-minio-hzfzk-xtrabackup.txt
logger.go:42: 08:54:31 | demand-backup/6-check-password-leak | + echo
logger.go:42: 08:54:31 | demand-backup/6-check-password-leak |
logger.go:42: 08:54:31 | demand-backup/6-check-password-leak | + '[' -n '' ']'
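The check_passwords_leak gate that just ran (and runs again in step 9) collects every non-certificate secret value and every container log in the namespace, then fails the step if a decoded password shows up in a log. A condensed sketch; the harness's own implementation lives in e2e-tests/functions, and the final grep below is an assumption about the comparison that the trace above short-circuits past because the decoded secret list is empty ('[ -n '' ]'):

    check_passwords_leak() {
        local secrets passwords pods containers p c pass
        # every secret value except certs, keys, and namespace fields
        secrets=$(kubectl get secrets -o json | jq -r '.items[].data | to_entries | .[]
            | select(.key | (endswith(".crt") or endswith(".key") or endswith(".pub")
                             or endswith(".pem") or endswith(".p12") or test("namespace")) | not)
            | .value')
        passwords=$(for p in ${secrets}; do base64 -d <<<"${p}"; echo; done)
        pods=$(kubectl -n "${NAMESPACE}" get pods -o name | awk -F / '{print $2}')
        for p in ${pods}; do
            containers=$(kubectl -n "${NAMESPACE}" get pod "${p}" -o 'jsonpath={.spec.containers[*].name}')
            for c in ${containers}; do
                # one file per pod/container, matching the paths printed above
                kubectl -n "${NAMESPACE}" logs "${p}" -c "${c}" > "${TEMP_DIR}/logs_output-${p}-${c}.txt"
                echo "logs saved in: ${TEMP_DIR}/logs_output-${p}-${c}.txt"
            done
        done
        # assumed final check: any decoded password found in a saved log fails the step
        if [ -n "${passwords}" ]; then
            for pass in ${passwords}; do
                if grep -q -- "${pass}" "${TEMP_DIR}"/logs_output-*.txt; then
                    return 1
                fi
            done
        fi
        return 0
    }

Saving the logs under TEMP_DIR (/tmp/kuttl/ps/demand-backup) also leaves a per-container artifact trail for debugging failed runs.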
logger.go:42: 08:54:32 | demand-backup/6-check-password-leak | test step completed 6-check-password-leak
logger.go:42: 08:54:32 | demand-backup/7-delete-data | starting test step 7-delete-data
logger.go:42: 08:54:32 | demand-backup/7-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 04-delete-data-minio-${i} --from-literal=data="${data}" done]
logger.go:42: 08:54:32 | demand-backup/7-delete-data | + source ../../functions
logger.go:42: 08:54:32 | demand-backup/7-delete-data | +++ get_cluster_name
logger.go:42: 08:54:32 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 08:54:33 | demand-backup/7-delete-data | ++ get_haproxy_svc demand-backup
logger.go:42: 08:54:33 | demand-backup/7-delete-data | ++ local cluster=demand-backup
logger.go:42: 08:54:33 | demand-backup/7-delete-data | ++ echo demand-backup-haproxy
logger.go:42: 08:54:33 | demand-backup/7-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 08:54:33 | demand-backup/7-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable'
logger.go:42: 08:54:33 | demand-backup/7-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 08:54:33 | demand-backup/7-delete-data | + local pod=
logger.go:42: 08:54:33 | demand-backup/7-delete-data | ++ get_client_pod
logger.go:42: 08:54:33 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 08:54:33 | demand-backup/7-delete-data | + client_pod=mysql-client
logger.go:42: 08:54:33 | demand-backup/7-delete-data | + wait_pod mysql-client
logger.go:42: 08:54:33 | demand-backup/7-delete-data | + local pod=mysql-client
logger.go:42: 08:54:33 | demand-backup/7-delete-data | + set +o xtrace
logger.go:42: 08:54:33 | demand-backup/7-delete-data | mysql-clienttrue
logger.go:42: 08:54:33 | demand-backup/7-delete-data | + kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 08:54:33 | demand-backup/7-delete-data | + sed -e 's/mysql: //'
logger.go:42: 08:54:33 | demand-backup/7-delete-data | + grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 08:54:34 | demand-backup/7-delete-data | + :
logger.go:42: 08:54:34 | demand-backup/7-delete-data | ++ get_cluster_name
logger.go:42: 08:54:34 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 08:54:35 | demand-backup/7-delete-data | + cluster_name=demand-backup
logger.go:42: 08:54:35 | demand-backup/7-delete-data | + for i in 0 1 2
logger.go:42: 08:54:35 | demand-backup/7-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password'
logger.go:42: 08:54:35 | demand-backup/7-delete-data | ++ local 'command=SELECT * FROM myDB.myTable'
logger.go:42: 08:54:35 | demand-backup/7-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password'
logger.go:42: 08:54:35 | demand-backup/7-delete-data | ++ local pod=
logger.go:42: 08:54:35 | demand-backup/7-delete-data | +++ get_client_pod
logger.go:42: 08:54:35 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 08:54:35 | demand-backup/7-delete-data | ++ client_pod=mysql-client
logger.go:42: 08:54:35 | demand-backup/7-delete-data | ++ wait_pod mysql-client
logger.go:42: 08:54:35 | demand-backup/7-delete-data | ++ local pod=mysql-client
logger.go:42: 08:54:35 | demand-backup/7-delete-data | ++ set +o xtrace
logger.go:42: 08:54:36 | demand-backup/7-delete-data | mysql-clienttrue
logger.go:42: 08:54:36 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password'
logger.go:42: 08:54:36 | demand-backup/7-delete-data | ++ sed -e 's/mysql: //'
logger.go:42: 08:54:36 | demand-backup/7-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 08:54:37 | demand-backup/7-delete-data | ++ :
logger.go:42: 08:54:37 | demand-backup/7-delete-data | + data=
logger.go:42: 08:54:37 | demand-backup/7-delete-data | + kubectl create configmap -n kuttl-test-awake-shepherd 04-delete-data-minio-0 --from-literal=data=
logger.go:42: 08:54:37 | demand-backup/7-delete-data | configmap/04-delete-data-minio-0 created
logger.go:42: 08:54:37 | demand-backup/7-delete-data | + for i in 0 1 2
logger.go:42: 08:54:37 | demand-backup/7-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password'
logger.go:42: 08:54:37 | demand-backup/7-delete-data | ++ local 'command=SELECT * FROM myDB.myTable'
logger.go:42: 08:54:37 | demand-backup/7-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password'
logger.go:42: 08:54:37 | demand-backup/7-delete-data | ++ local pod=
logger.go:42: 08:54:37 | demand-backup/7-delete-data | +++ get_client_pod
logger.go:42: 08:54:37 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 08:54:38 | demand-backup/7-delete-data | ++ client_pod=mysql-client
logger.go:42: 08:54:38 | demand-backup/7-delete-data | ++ wait_pod mysql-client
logger.go:42: 08:54:38 | demand-backup/7-delete-data | ++ local pod=mysql-client
logger.go:42: 08:54:38 | demand-backup/7-delete-data | ++ set +o xtrace
logger.go:42: 08:54:38 | demand-backup/7-delete-data | mysql-clienttrue
logger.go:42: 08:54:38 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password'
logger.go:42: 08:54:38 | demand-backup/7-delete-data | ++ sed -e 's/mysql: //'
logger.go:42: 08:54:38 | demand-backup/7-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 08:54:39 | demand-backup/7-delete-data | ++ :
logger.go:42: 08:54:39 | demand-backup/7-delete-data | + data=
logger.go:42: 08:54:39 | demand-backup/7-delete-data | + kubectl create configmap -n kuttl-test-awake-shepherd 04-delete-data-minio-1 --from-literal=data=
logger.go:42: 08:54:39 | demand-backup/7-delete-data | configmap/04-delete-data-minio-1 created
logger.go:42: 08:54:39 | demand-backup/7-delete-data | + for i in 0 1 2
logger.go:42: 08:54:39 | demand-backup/7-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password'
logger.go:42: 08:54:39 | demand-backup/7-delete-data | ++ local 'command=SELECT * FROM myDB.myTable'
logger.go:42: 08:54:39 | demand-backup/7-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password'
logger.go:42: 08:54:39 | demand-backup/7-delete-data | ++ local pod=
logger.go:42: 08:54:40 | demand-backup/7-delete-data | +++ get_client_pod
logger.go:42: 08:54:40 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 08:54:40 | demand-backup/7-delete-data | ++ client_pod=mysql-client
logger.go:42: 08:54:40 | demand-backup/7-delete-data | ++ wait_pod mysql-client
logger.go:42: 08:54:40 | demand-backup/7-delete-data | ++ local pod=mysql-client
logger.go:42: 08:54:40 | demand-backup/7-delete-data | ++ set +o xtrace
logger.go:42: 08:54:40 | demand-backup/7-delete-data | mysql-clienttrue
logger.go:42: 08:54:40 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password'
logger.go:42: 08:54:40 | demand-backup/7-delete-data | ++ sed -e 's/mysql: //'
logger.go:42: 08:54:40 | demand-backup/7-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 08:54:41 | demand-backup/7-delete-data | ++ :
logger.go:42: 08:54:41 | demand-backup/7-delete-data | + data=
logger.go:42: 08:54:41 | demand-backup/7-delete-data | + kubectl create configmap -n kuttl-test-awake-shepherd 04-delete-data-minio-2 --from-literal=data=
logger.go:42: 08:54:42 | demand-backup/7-delete-data | configmap/04-delete-data-minio-2 created
logger.go:42: 08:54:43 | demand-backup/7-delete-data | test step completed 7-delete-data
logger.go:42: 08:54:43 | demand-backup/8-restore-from-minio | starting test step 8-restore-from-minio
logger.go:42: 08:54:44 | demand-backup/8-restore-from-minio | PerconaServerMySQLRestore:kuttl-test-awake-shepherd/demand-backup-restore-minio created
logger.go:42: 08:59:45 | demand-backup/8-restore-from-minio | test step completed 8-restore-from-minio
logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | starting test step 9-check-password-leak
logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | running command: [sh -c set -o errexit set -o xtrace source ../../functions check_passwords_leak]
logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | + source ../../functions
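Step 7 above records each replica's (now empty) view of the table in a per-pod ConfigMap so that later kuttl assert files can compare it, and its post-restore counterpart, against expected data. The capture pattern, exactly as traced above, with the step-number prefix 04- keying the ConfigMaps to the consuming step:

    # snapshot each replica's view of the table into a step-scoped ConfigMap;
    # reading each pod directly (not via haproxy) exposes any replica that disagrees
    cluster_name=$(kubectl -n "${NAMESPACE}" get ps -o 'jsonpath={.items[0].metadata.name}')
    for i in 0 1 2; do
        data=$(run_mysql "SELECT * FROM myDB.myTable" \
            "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password")
        kubectl create configmap -n "${NAMESPACE}" "04-delete-data-minio-${i}" \
            --from-literal=data="${data}"
    done

After the TRUNCATE, every captured data value is empty, which is exactly what the 04-delete-data-minio-* ConfigMaps record; the restore in step 8 then takes about five minutes (08:54:44 to 08:59:45) before the step completes.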
logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | ++++ pwd logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/tests/demand-backup logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | ++ test_name=demand-backup logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/vars.sh logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export GIT_BRANCH=PR-523 logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ GIT_BRANCH=PR-523 logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export VERSION=PR-523-f00253e logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ VERSION=PR-523-f00253e logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export 
IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | ++++ which gdate logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-523/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | ++++ which date logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ date=/usr/bin/date logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ command -v oc logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ kubectl get nodes logger.go:42: 08:59:45 | demand-backup/9-check-password-leak | +++ grep '^minikube' logger.go:42: 08:59:46 | demand-backup/9-check-password-leak | + check_passwords_leak logger.go:42: 08:59:46 | demand-backup/9-check-password-leak | + local secrets logger.go:42: 08:59:46 | demand-backup/9-check-password-leak | + local passwords logger.go:42: 08:59:46 | demand-backup/9-check-password-leak | + local pods logger.go:42: 08:59:46 | demand-backup/9-check-password-leak | ++ kubectl get secrets -o json logger.go:42: 08:59:46 | demand-backup/9-check-password-leak | ++ jq -r '.items[].data | to_entries | .[] | select(.key | (endswith(".crt") or endswith(".key") or endswith(".pub") or endswith(".pem") or endswith(".p12") or test("namespace")) | not) | .value' logger.go:42: 08:59:46 | demand-backup/9-check-password-leak | + secrets= logger.go:42: 08:59:46 | demand-backup/9-check-password-leak | + passwords=' ' logger.go:42: 08:59:46 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pods -o name logger.go:42: 08:59:46 | demand-backup/9-check-password-leak | ++ awk -F / '{print $2}' logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | + pods='demand-backup-haproxy-0 logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | demand-backup-haproxy-1 logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | demand-backup-haproxy-2 logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | demand-backup-mysql-0 logger.go:42: 08:59:47 | 
demand-backup/9-check-password-leak | demand-backup-mysql-1 logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | demand-backup-mysql-2 logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | demand-backup-orc-0 logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | demand-backup-orc-1 logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | demand-backup-orc-2 logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | minio-service-85cdcd4d44-jfmbv logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | mysql-client logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | percona-server-mysql-operator-b4c599bbb-cn75v logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | xb-demand-backup-minio-minio-hzfzk logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | xb-restore-demand-backup-restore-minio-c9x8v' logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | + collect_logs kuttl-test-awake-shepherd logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | + local containers logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | + local count logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | + NS=kuttl-test-awake-shepherd logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-haproxy-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:47 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-haproxy-0 -c haproxy logger.go:42: 08:59:48 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-haproxy.txt logger.go:42: 08:59:48 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-haproxy.txt logger.go:42: 08:59:48 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:48 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-haproxy-0 -c mysql-monit logger.go:42: 08:59:48 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-mysql-monit.txt logger.go:42: 08:59:48 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-mysql-monit.txt logger.go:42: 08:59:48 | demand-backup/9-check-password-leak | + echo logger.go:42: 08:59:48 | demand-backup/9-check-password-leak | logger.go:42: 08:59:48 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 08:59:48 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-haproxy-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 08:59:49 | demand-backup/9-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 08:59:49 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:49 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-haproxy-1 -c haproxy logger.go:42: 08:59:50 | demand-backup/9-check-password-leak | + echo logs saved in: 
/tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-haproxy.txt logger.go:42: 08:59:50 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-haproxy.txt logger.go:42: 08:59:50 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:50 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-haproxy-1 -c mysql-monit logger.go:42: 08:59:50 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-mysql-monit.txt logger.go:42: 08:59:50 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-mysql-monit.txt logger.go:42: 08:59:50 | demand-backup/9-check-password-leak | + echo logger.go:42: 08:59:50 | demand-backup/9-check-password-leak | logger.go:42: 08:59:50 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 08:59:50 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-haproxy-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 08:59:51 | demand-backup/9-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 08:59:51 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:51 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-haproxy-2 -c haproxy logger.go:42: 08:59:51 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-haproxy.txt logger.go:42: 08:59:51 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-haproxy.txt logger.go:42: 08:59:51 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:51 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-haproxy-2 -c mysql-monit logger.go:42: 08:59:52 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-mysql-monit.txt logger.go:42: 08:59:52 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-mysql-monit.txt logger.go:42: 08:59:52 | demand-backup/9-check-password-leak | + echo logger.go:42: 08:59:52 | demand-backup/9-check-password-leak | logger.go:42: 08:59:52 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 08:59:52 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-mysql-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 08:59:53 | demand-backup/9-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 08:59:53 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:53 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-0 -c mysql logger.go:42: 08:59:53 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-mysql.txt logger.go:42: 08:59:53 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-mysql.txt logger.go:42: 08:59:53 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:53 | 
demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-0 -c xtrabackup logger.go:42: 08:59:54 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-xtrabackup.txt logger.go:42: 08:59:54 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-xtrabackup.txt logger.go:42: 08:59:54 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:54 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-0 -c pt-heartbeat logger.go:42: 08:59:54 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-pt-heartbeat.txt logger.go:42: 08:59:54 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-pt-heartbeat.txt logger.go:42: 08:59:54 | demand-backup/9-check-password-leak | + echo logger.go:42: 08:59:54 | demand-backup/9-check-password-leak | logger.go:42: 08:59:54 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 08:59:54 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-mysql-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 08:59:55 | demand-backup/9-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 08:59:55 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:55 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-1 -c mysql logger.go:42: 08:59:55 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-mysql.txt logger.go:42: 08:59:55 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-mysql.txt logger.go:42: 08:59:55 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:55 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-1 -c xtrabackup logger.go:42: 08:59:56 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-xtrabackup.txt logger.go:42: 08:59:56 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-xtrabackup.txt logger.go:42: 08:59:56 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:56 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-1 -c pt-heartbeat logger.go:42: 08:59:57 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-pt-heartbeat.txt logger.go:42: 08:59:57 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-pt-heartbeat.txt logger.go:42: 08:59:57 | demand-backup/9-check-password-leak | + echo logger.go:42: 08:59:57 | demand-backup/9-check-password-leak | logger.go:42: 08:59:57 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 08:59:57 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-mysql-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 08:59:57 | 
demand-backup/9-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 08:59:57 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:57 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-2 -c mysql logger.go:42: 08:59:58 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-mysql.txt logger.go:42: 08:59:58 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-mysql.txt logger.go:42: 08:59:58 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:58 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-2 -c xtrabackup logger.go:42: 08:59:58 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-xtrabackup.txt logger.go:42: 08:59:58 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-xtrabackup.txt logger.go:42: 08:59:58 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:58 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-mysql-2 -c pt-heartbeat logger.go:42: 08:59:59 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-pt-heartbeat.txt logger.go:42: 08:59:59 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-pt-heartbeat.txt logger.go:42: 08:59:59 | demand-backup/9-check-password-leak | + echo logger.go:42: 08:59:59 | demand-backup/9-check-password-leak | logger.go:42: 08:59:59 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 08:59:59 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-orc-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 08:59:59 | demand-backup/9-check-password-leak | + containers='orc mysql-monit' logger.go:42: 08:59:59 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 08:59:59 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-orc-0 -c orc logger.go:42: 09:00:00 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-orc.txt logger.go:42: 09:00:00 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-orc.txt logger.go:42: 09:00:00 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 09:00:00 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-orc-0 -c mysql-monit logger.go:42: 09:00:01 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-mysql-monit.txt logger.go:42: 09:00:01 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-mysql-monit.txt logger.go:42: 09:00:01 | demand-backup/9-check-password-leak | + echo logger.go:42: 09:00:01 | demand-backup/9-check-password-leak | logger.go:42: 09:00:01 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 09:00:01 | demand-backup/9-check-password-leak | ++ kubectl 
-n kuttl-test-awake-shepherd get pod demand-backup-orc-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 09:00:01 | demand-backup/9-check-password-leak | + containers='orc mysql-monit' logger.go:42: 09:00:01 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 09:00:01 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-orc-1 -c orc logger.go:42: 09:00:02 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-orc.txt logger.go:42: 09:00:02 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-orc.txt logger.go:42: 09:00:02 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 09:00:02 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-orc-1 -c mysql-monit logger.go:42: 09:00:02 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-mysql-monit.txt logger.go:42: 09:00:02 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-mysql-monit.txt logger.go:42: 09:00:02 | demand-backup/9-check-password-leak | + echo logger.go:42: 09:00:02 | demand-backup/9-check-password-leak | logger.go:42: 09:00:02 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 09:00:02 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod demand-backup-orc-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 09:00:03 | demand-backup/9-check-password-leak | + containers='orc mysql-monit' logger.go:42: 09:00:03 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 09:00:03 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-orc-2 -c orc logger.go:42: 09:00:03 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-orc.txt logger.go:42: 09:00:03 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-orc.txt logger.go:42: 09:00:03 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 09:00:03 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs demand-backup-orc-2 -c mysql-monit logger.go:42: 09:00:04 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-mysql-monit.txt logger.go:42: 09:00:04 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-mysql-monit.txt logger.go:42: 09:00:04 | demand-backup/9-check-password-leak | + echo logger.go:42: 09:00:04 | demand-backup/9-check-password-leak | logger.go:42: 09:00:04 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 09:00:04 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod minio-service-85cdcd4d44-jfmbv -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 09:00:04 | demand-backup/9-check-password-leak | + containers=minio logger.go:42: 09:00:04 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 09:00:04 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs minio-service-85cdcd4d44-jfmbv -c minio logger.go:42: 09:00:05 | 
demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-minio-service-85cdcd4d44-jfmbv-minio.txt logger.go:42: 09:00:05 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-minio-service-85cdcd4d44-jfmbv-minio.txt logger.go:42: 09:00:05 | demand-backup/9-check-password-leak | + echo logger.go:42: 09:00:05 | demand-backup/9-check-password-leak | logger.go:42: 09:00:05 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 09:00:05 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod mysql-client -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 09:00:05 | demand-backup/9-check-password-leak | + containers=mysql-client logger.go:42: 09:00:05 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 09:00:05 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs mysql-client -c mysql-client logger.go:42: 09:00:06 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-mysql-client-mysql-client.txt logger.go:42: 09:00:06 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-mysql-client-mysql-client.txt logger.go:42: 09:00:06 | demand-backup/9-check-password-leak | + echo logger.go:42: 09:00:06 | demand-backup/9-check-password-leak | logger.go:42: 09:00:06 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 09:00:06 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod percona-server-mysql-operator-b4c599bbb-cn75v -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 09:00:06 | demand-backup/9-check-password-leak | + containers=manager logger.go:42: 09:00:06 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 09:00:06 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs percona-server-mysql-operator-b4c599bbb-cn75v -c manager logger.go:42: 09:00:07 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-percona-server-mysql-operator-b4c599bbb-cn75v-manager.txt logger.go:42: 09:00:07 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-percona-server-mysql-operator-b4c599bbb-cn75v-manager.txt logger.go:42: 09:00:07 | demand-backup/9-check-password-leak | + echo logger.go:42: 09:00:07 | demand-backup/9-check-password-leak | logger.go:42: 09:00:07 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 09:00:07 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod xb-demand-backup-minio-minio-hzfzk -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 09:00:07 | demand-backup/9-check-password-leak | + containers=xtrabackup logger.go:42: 09:00:07 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 09:00:07 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs xb-demand-backup-minio-minio-hzfzk -c xtrabackup logger.go:42: 09:00:08 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-demand-backup-minio-minio-hzfzk-xtrabackup.txt logger.go:42: 09:00:08 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-demand-backup-minio-minio-hzfzk-xtrabackup.txt logger.go:42: 09:00:08 | demand-backup/9-check-password-leak | + echo 
logger.go:42: 09:00:08 | demand-backup/9-check-password-leak | logger.go:42: 09:00:08 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 09:00:08 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-awake-shepherd get pod xb-restore-demand-backup-restore-minio-c9x8v -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 09:00:08 | demand-backup/9-check-password-leak | + containers=xtrabackup logger.go:42: 09:00:08 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 09:00:08 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-awake-shepherd logs xb-restore-demand-backup-restore-minio-c9x8v -c xtrabackup logger.go:42: 09:00:09 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-restore-demand-backup-restore-minio-c9x8v-xtrabackup.txt logger.go:42: 09:00:09 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-restore-demand-backup-restore-minio-c9x8v-xtrabackup.txt logger.go:42: 09:00:09 | demand-backup/9-check-password-leak | + echo logger.go:42: 09:00:09 | demand-backup/9-check-password-leak | logger.go:42: 09:00:09 | demand-backup/9-check-password-leak | + '[' -n '' ']' logger.go:42: 09:00:10 | demand-backup/9-check-password-leak | test step completed 9-check-password-leak logger.go:42: 09:00:10 | demand-backup/10-read-data | starting test step 10-read-data logger.go:42: 09:00:10 | demand-backup/10-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-read-data-minio-${i} --from-literal=data="${data}" done] logger.go:42: 09:00:10 | demand-backup/10-read-data | + source ../../functions logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ realpath ../../.. 
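Step 9 above is the check_passwords_leak helper end to end: it gathers candidate secret values, saves every container log in the namespace under ${TEMP_DIR}, and would grep those files for each value. In this run the jq filter returned nothing (secrets= is empty), so the final [ -n '' ] test falls through and only the log collection is visible. A condensed sketch of the helper as the trace suggests it works; the base64-decoding and grep stages are assumptions, since neither fires in this run:

    check_passwords_leak() {
        local secrets passwords pods

        # every secret value that is not TLS material (filter copied from the trace)
        secrets=$(kubectl get secrets -o json | jq -r '.items[].data | to_entries
            | .[] | select(.key | (endswith(".crt") or endswith(".key")
                or endswith(".pub") or endswith(".pem") or endswith(".p12")
                or test("namespace")) | not) | .value')

        # assumed: secret values are base64 while logs are plain text,
        # so both the decoded and encoded forms are searched for
        passwords="$(for s in ${secrets}; do echo "${s}" | base64 -d; echo; done) ${secrets}"

        pods=$(kubectl -n "${NAMESPACE}" get pods -o name | awk -F / '{print $2}')

        for p in ${pods}; do
            containers=$(kubectl -n "${NAMESPACE}" get pod "${p}" \
                -o 'jsonpath={.spec.containers[*].name}')
            for c in ${containers}; do
                kubectl -n "${NAMESPACE}" logs "${p}" -c "${c}" \
                    > "${TEMP_DIR}/logs_output-${p}-${c}.txt"
                echo "logs saved in: ${TEMP_DIR}/logs_output-${p}-${c}.txt"
                # assumed: grep each saved file for every entry in ${passwords};
                # any match fails the test step
            done
            echo
        done
    }

Worth noting: the kubectl get secrets call runs without -n, so it inspects the current context's default namespace rather than kuttl-test-awake-shepherd, which would explain the empty result here.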
logger.go:42: 09:00:10 | demand-backup/10-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:00:10 | demand-backup/10-read-data | ++++ pwd logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/tests/demand-backup logger.go:42: 09:00:10 | demand-backup/10-read-data | ++ test_name=demand-backup logger.go:42: 09:00:10 | demand-backup/10-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/vars.sh logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:00:10 | demand-backup/10-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export GIT_BRANCH=PR-523 logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ GIT_BRANCH=PR-523 logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export VERSION=PR-523-f00253e logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ VERSION=PR-523-f00253e logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:00:10 | demand-backup/10-read-data | ++++ which gdate logger.go:42: 09:00:10 | demand-backup/10-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-523/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 09:00:10 | demand-backup/10-read-data | ++++ which date logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ date=/usr/bin/date logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ command -v oc logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ kubectl get nodes logger.go:42: 09:00:10 | demand-backup/10-read-data | +++ grep '^minikube' logger.go:42: 09:00:10 | demand-backup/10-read-data | ++ get_cluster_name logger.go:42: 09:00:10 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 09:00:11 | demand-backup/10-read-data | + cluster_name=demand-backup logger.go:42: 09:00:11 | demand-backup/10-read-data | + for i in 0 1 2 logger.go:42: 09:00:11 | demand-backup/10-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:11 | demand-backup/10-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:00:11 | demand-backup/10-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:11 | demand-backup/10-read-data | ++ local pod= logger.go:42: 09:00:11 | demand-backup/10-read-data | +++ get_client_pod logger.go:42: 09:00:11 | demand-backup/10-read-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:00:11 | demand-backup/10-read-data | ++ client_pod=mysql-client logger.go:42: 09:00:11 | demand-backup/10-read-data | ++ wait_pod mysql-client logger.go:42: 09:00:11 | demand-backup/10-read-data | ++ local pod=mysql-client logger.go:42: 09:00:11 | demand-backup/10-read-data | ++ set +o xtrace logger.go:42: 09:00:12 | demand-backup/10-read-data | mysql-clienttrue logger.go:42: 09:00:12 | demand-backup/10-read-data | ++ sed -e 's/mysql: //' logger.go:42: 09:00:12 | demand-backup/10-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 09:00:12 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:13 | demand-backup/10-read-data | + data=100500 logger.go:42: 09:00:13 | demand-backup/10-read-data | + kubectl create configmap -n kuttl-test-awake-shepherd 06-read-data-minio-0 --from-literal=data=100500 logger.go:42: 09:00:13 | demand-backup/10-read-data | configmap/06-read-data-minio-0 created logger.go:42: 09:00:13 | demand-backup/10-read-data | + for i in 0 1 2 logger.go:42: 09:00:13 | demand-backup/10-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:13 | demand-backup/10-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:00:13 | demand-backup/10-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:13 | demand-backup/10-read-data | ++ local pod= logger.go:42: 09:00:13 | demand-backup/10-read-data | +++ get_client_pod logger.go:42: 09:00:13 | demand-backup/10-read-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:00:14 | demand-backup/10-read-data | ++ client_pod=mysql-client logger.go:42: 09:00:14 | demand-backup/10-read-data | ++ wait_pod mysql-client logger.go:42: 09:00:14 | demand-backup/10-read-data | ++ local pod=mysql-client logger.go:42: 09:00:14 | demand-backup/10-read-data | ++ set +o xtrace logger.go:42: 09:00:14 | demand-backup/10-read-data | mysql-clienttrue logger.go:42: 09:00:14 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:14 | demand-backup/10-read-data | ++ sed -e 's/mysql: //' logger.go:42: 09:00:14 | demand-backup/10-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 09:00:15 | demand-backup/10-read-data | + data=100500 logger.go:42: 09:00:15 | demand-backup/10-read-data | + kubectl create configmap -n kuttl-test-awake-shepherd 06-read-data-minio-1 --from-literal=data=100500 logger.go:42: 09:00:15 | demand-backup/10-read-data | configmap/06-read-data-minio-1 created logger.go:42: 09:00:15 | demand-backup/10-read-data | + for i in 0 1 2 logger.go:42: 09:00:15 | demand-backup/10-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:15 | demand-backup/10-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:00:15 | demand-backup/10-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:15 | demand-backup/10-read-data | ++ local pod= logger.go:42: 09:00:15 | demand-backup/10-read-data | +++ get_client_pod logger.go:42: 09:00:15 | demand-backup/10-read-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:00:16 | demand-backup/10-read-data | ++ client_pod=mysql-client logger.go:42: 09:00:16 | demand-backup/10-read-data | ++ wait_pod mysql-client logger.go:42: 09:00:16 | demand-backup/10-read-data | ++ local pod=mysql-client logger.go:42: 09:00:16 | demand-backup/10-read-data | ++ set +o xtrace logger.go:42: 09:00:16 | demand-backup/10-read-data | mysql-clienttrue logger.go:42: 09:00:16 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:16 | demand-backup/10-read-data | ++ sed -e 's/mysql: //' logger.go:42: 09:00:16 | demand-backup/10-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 09:00:17 | demand-backup/10-read-data | + data=100500 logger.go:42: 09:00:17 | demand-backup/10-read-data | + kubectl create configmap -n kuttl-test-awake-shepherd 06-read-data-minio-2 --from-literal=data=100500 logger.go:42: 09:00:18 | demand-backup/10-read-data | configmap/06-read-data-minio-2 created logger.go:42: 09:00:19 | demand-backup/10-read-data | test step completed 10-read-data logger.go:42: 09:00:19 | demand-backup/11-delete-data | starting test step 11-delete-data logger.go:42: 09:00:19 | demand-backup/11-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2 do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 04-delete-data-minio-backup-source-${i} --from-literal=data="${data}" done] logger.go:42: 09:00:19 | demand-backup/11-delete-data | + source ../../functions logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ realpath ../../.. 
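Step 10 is the actual restore verification: SELECT * FROM myDB.myTable now returns 100500 on all three mysql pods, so the rows deleted in step 7 came back from the minio backup. The values land in configmaps 06-read-data-minio-0 through -2, which kuttl compares against the step's assert file; that file is not part of the log, but the check it performs is equivalent to this manual loop (configmap names and the expected value are taken from the trace above):

    # manual equivalent of the kuttl assert for this step
    for i in 0 1 2; do
        test "$(kubectl -n "${NAMESPACE}" get configmap "06-read-data-minio-${i}" \
            -o 'jsonpath={.data.data}')" = "100500" && echo "replica ${i}: ok"
    done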
logger.go:42: 09:00:19 | demand-backup/11-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:00:19 | demand-backup/11-delete-data | ++++ pwd logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/tests/demand-backup logger.go:42: 09:00:19 | demand-backup/11-delete-data | ++ test_name=demand-backup logger.go:42: 09:00:19 | demand-backup/11-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/vars.sh logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:00:19 | demand-backup/11-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export GIT_BRANCH=PR-523 logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ GIT_BRANCH=PR-523 logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export VERSION=PR-523-f00253e logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ VERSION=PR-523-f00253e logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 
09:00:19 | demand-backup/11-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:00:19 | demand-backup/11-delete-data | ++++ which gdate logger.go:42: 09:00:19 | demand-backup/11-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-523/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 09:00:19 | demand-backup/11-delete-data | ++++ which date logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ date=/usr/bin/date logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ command -v oc logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ kubectl get nodes logger.go:42: 09:00:19 | demand-backup/11-delete-data | +++ grep '^minikube' logger.go:42: 09:00:20 | demand-backup/11-delete-data | +++ get_cluster_name logger.go:42: 09:00:20 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 09:00:20 | demand-backup/11-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 09:00:20 | demand-backup/11-delete-data | ++ local cluster=demand-backup logger.go:42: 09:00:20 | demand-backup/11-delete-data | ++ echo demand-backup-haproxy logger.go:42: 09:00:20 | demand-backup/11-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 09:00:20 | demand-backup/11-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 09:00:20 | demand-backup/11-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 09:00:20 | demand-backup/11-delete-data | + local pod= logger.go:42: 09:00:20 | demand-backup/11-delete-data | ++ get_client_pod logger.go:42: 09:00:20 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:00:20 | demand-backup/11-delete-data | + client_pod=mysql-client logger.go:42: 09:00:20 | demand-backup/11-delete-data | + wait_pod mysql-client logger.go:42: 09:00:20 | demand-backup/11-delete-data | + local pod=mysql-client logger.go:42: 09:00:20 | demand-backup/11-delete-data | + set +o xtrace logger.go:42: 09:00:21 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 09:00:21 | demand-backup/11-delete-data | + sed -e 's/mysql: //' logger.go:42: 09:00:21 | demand-backup/11-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 09:00:21 | demand-backup/11-delete-data | + kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 09:00:22 | demand-backup/11-delete-data | + : logger.go:42: 09:00:22 | demand-backup/11-delete-data | ++ get_cluster_name logger.go:42: 09:00:22 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 09:00:22 | demand-backup/11-delete-data | + cluster_name=demand-backup logger.go:42: 09:00:22 | demand-backup/11-delete-data | + for i in 0 1 2 logger.go:42: 09:00:22 | demand-backup/11-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:22 | demand-backup/11-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:00:22 | demand-backup/11-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:22 | demand-backup/11-delete-data | ++ local pod= logger.go:42: 09:00:22 | demand-backup/11-delete-data | +++ get_client_pod logger.go:42: 09:00:22 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:00:23 | demand-backup/11-delete-data | ++ client_pod=mysql-client logger.go:42: 09:00:23 | demand-backup/11-delete-data | ++ wait_pod mysql-client logger.go:42: 09:00:23 | demand-backup/11-delete-data | ++ local pod=mysql-client logger.go:42: 09:00:23 | demand-backup/11-delete-data | ++ set +o xtrace logger.go:42: 09:00:23 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 09:00:23 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:23 | demand-backup/11-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 09:00:23 | demand-backup/11-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 09:00:24 | demand-backup/11-delete-data | ++ : logger.go:42: 09:00:24 | demand-backup/11-delete-data | + data= logger.go:42: 09:00:24 | demand-backup/11-delete-data | + kubectl create configmap -n kuttl-test-awake-shepherd 04-delete-data-minio-backup-source-0 --from-literal=data= logger.go:42: 09:00:25 | demand-backup/11-delete-data | configmap/04-delete-data-minio-backup-source-0 created logger.go:42: 09:00:25 | demand-backup/11-delete-data | + for i in 0 1 2 logger.go:42: 09:00:25 | demand-backup/11-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:25 | demand-backup/11-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:00:25 | demand-backup/11-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:25 | demand-backup/11-delete-data | ++ local pod= logger.go:42: 09:00:25 | demand-backup/11-delete-data | +++ get_client_pod logger.go:42: 09:00:25 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:00:25 | demand-backup/11-delete-data | ++ client_pod=mysql-client logger.go:42: 09:00:25 | demand-backup/11-delete-data | ++ wait_pod mysql-client logger.go:42: 09:00:25 | demand-backup/11-delete-data | ++ local pod=mysql-client logger.go:42: 09:00:25 | demand-backup/11-delete-data | ++ set +o xtrace logger.go:42: 09:00:25 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 09:00:25 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:25 | demand-backup/11-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 09:00:25 | demand-backup/11-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 09:00:27 | demand-backup/11-delete-data | ++ : logger.go:42: 09:00:27 | demand-backup/11-delete-data | + data= logger.go:42: 09:00:27 | demand-backup/11-delete-data | + kubectl create configmap -n kuttl-test-awake-shepherd 04-delete-data-minio-backup-source-1 --from-literal=data= logger.go:42: 09:00:27 | demand-backup/11-delete-data | configmap/04-delete-data-minio-backup-source-1 created logger.go:42: 09:00:27 | demand-backup/11-delete-data | + for i in 0 1 2 logger.go:42: 09:00:27 | demand-backup/11-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:27 | demand-backup/11-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:00:27 | demand-backup/11-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:27 | demand-backup/11-delete-data | ++ local pod= logger.go:42: 09:00:27 | demand-backup/11-delete-data | +++ get_client_pod logger.go:42: 09:00:27 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:00:27 | demand-backup/11-delete-data | ++ client_pod=mysql-client logger.go:42: 09:00:27 | demand-backup/11-delete-data | ++ wait_pod mysql-client logger.go:42: 09:00:27 | demand-backup/11-delete-data | ++ local pod=mysql-client logger.go:42: 09:00:27 | demand-backup/11-delete-data | ++ set +o xtrace logger.go:42: 09:00:28 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 09:00:28 | demand-backup/11-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 09:00:28 | demand-backup/11-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 09:00:28 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:00:29 | demand-backup/11-delete-data | ++ : logger.go:42: 09:00:29 | demand-backup/11-delete-data | + data= logger.go:42: 09:00:29 | demand-backup/11-delete-data | + kubectl create configmap -n kuttl-test-awake-shepherd 04-delete-data-minio-backup-source-2 --from-literal=data= logger.go:42: 09:00:29 | demand-backup/11-delete-data | configmap/04-delete-data-minio-backup-source-2 created logger.go:42: 09:00:31 | demand-backup/11-delete-data | test step completed 11-delete-data logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | starting test step 12-restore-from-minio-backup-source logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | running command: [sh -c set -o errexit set -o xtrace source ../../functions storage_name="minio" backup_name="demand-backup-minio" restore_name="demand-backup-restore-minio-backup-source" cluster_name="${test_name}${name_suffix:+-$name_suffix}" destination=$(kubectl -n "${NAMESPACE}" get ps-backup "${backup_name}" -o jsonpath='{.status.destination}') cat "${DEPLOY_DIR}/restore.yaml" \ | yq eval "$(printf '.metadata.name="%s"' "${restore_name}")" - \ | yq eval "$(printf '.spec.clusterName="%s"' "${cluster_name}")" - \ | yq eval "del(.spec.backupName)" - \ | yq eval "$(printf '.spec.backupSource.destination="%s"' "${destination}")" - \ | yq eval '.spec.backupSource.storage.type="s3"' - \ | yq eval '.spec.backupSource.storage.s3.bucket="operator-testing"' - \ | yq eval '.spec.backupSource.storage.s3.credentialsSecret="minio-secret"' - \ | yq eval '.spec.backupSource.storage.s3.endpointUrl="http://minio-service:9000"' - \ | yq eval '.spec.backupSource.storage.s3.region="us-east-1"' - \ | kubectl apply -n "${NAMESPACE}" -f -] logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + source ../../functions logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ realpath ../../.. 
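Step 12 exercises the restore-from-backupSource path: instead of referencing the backup object by name, the script above strips .spec.backupName from deploy/restore.yaml and substitutes an explicit backupSource pointing at the same minio destination. After the yq pipeline runs, the manifest handed to kubectl apply should look roughly like this (destination is read from the earlier backup's .status.destination and its value is not printed in this excerpt, so the lookup stands in for it; the apiVersion is assumed, as in step 8):

    destination=$(kubectl -n "${NAMESPACE}" get ps-backup demand-backup-minio \
        -o jsonpath='{.status.destination}')

    kubectl -n "${NAMESPACE}" apply -f - <<EOF
    apiVersion: ps.percona.com/v1alpha1    # assumed; not shown in this log
    kind: PerconaServerMySQLRestore
    metadata:
      name: demand-backup-restore-minio-backup-source
    spec:
      clusterName: demand-backup
      backupSource:                        # no backupName: restore straight from storage
        destination: ${destination}
        storage:
          type: s3
          s3:
            bucket: operator-testing
            credentialsSecret: minio-secret
            endpointUrl: http://minio-service:9000
            region: us-east-1
    EOF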
logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | ++++ pwd logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/tests/demand-backup logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | ++ test_name=demand-backup logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/vars.sh logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export GIT_BRANCH=PR-523 logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ GIT_BRANCH=PR-523 logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export VERSION=PR-523-f00253e logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ VERSION=PR-523-f00253e logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | ++++ which gdate logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-523/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | ++++ which date logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ date=/usr/bin/date logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ command -v oc logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ kubectl get nodes logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | +++ grep '^minikube' logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + storage_name=minio logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + backup_name=demand-backup-minio logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + restore_name=demand-backup-restore-minio-backup-source logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + cluster_name=demand-backup logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | ++ kubectl -n kuttl-test-awake-shepherd get ps-backup demand-backup-minio -o 'jsonpath={.status.destination}' logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + destination=s3://operator-testing/demand-backup-2024-02-06-08:53:57-full
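Step 12 deliberately avoids referencing the PerconaServerMySQLBackup object in the restore spec; all it needs is the S3 path the operator wrote into the backup's status once the upload finished, captured above into destination. Checking that status by hand looks roughly like this (the destination jsonpath is taken verbatim from the trace; the .status.state field name is an assumption and may differ between operator versions):

    # Inspect what the operator recorded for the finished backup.
    kubectl -n "${NAMESPACE}" get ps-backup demand-backup-minio \
        -o jsonpath='{.status.destination}{"\n"}{.status.state}{"\n"}'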
logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + cat /mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy/restore.yaml logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + yq eval 'del(.spec.backupName)' - logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | ++ printf '.spec.backupSource.destination="%s"' s3://operator-testing/demand-backup-2024-02-06-08:53:57-full logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.endpointUrl="http://minio-service:9000"' - logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.destination="s3://operator-testing/demand-backup-2024-02-06-08:53:57-full"' - logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.credentialsSecret="minio-secret"' - logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.type="s3"' - logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | ++ printf '.spec.clusterName="%s"' demand-backup logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.bucket="operator-testing"' - logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.clusterName="demand-backup"' - logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.region="us-east-1"' - logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + kubectl apply -n kuttl-test-awake-shepherd -f - logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | ++ printf '.metadata.name="%s"' demand-backup-restore-minio-backup-source logger.go:42: 09:00:31 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.metadata.name="demand-backup-restore-minio-backup-source"' - logger.go:42: 09:00:32 | demand-backup/12-restore-from-minio-backup-source | perconaservermysqlrestore.ps.percona.com/demand-backup-restore-minio-backup-source created logger.go:42: 09:05:45 | demand-backup/12-restore-from-minio-backup-source | test step completed 12-restore-from-minio-backup-source logger.go:42: 09:05:45 | demand-backup/13-read-data | starting test step 13-read-data logger.go:42: 09:05:45 | demand-backup/13-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2 do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 09-read-data-minio-backup-source-${i} --from-literal=data="${data}" done] logger.go:42: 09:05:45 | demand-backup/13-read-data | + source ../../functions logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ realpath ../../.. 
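The step-12 yq pipeline above starts from deploy/restore.yaml, renames the object, deletes spec.backupName, and grafts an inline backupSource onto the spec. The manifest that finally reaches kubectl apply is therefore equivalent to this sketch (every field value is read from the yq expressions in the trace; only the apiVersion is an assumption, since the log confirms just the ps.percona.com group):

    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLRestore
    metadata:
      name: demand-backup-restore-minio-backup-source
    spec:
      clusterName: demand-backup
      backupSource:
        destination: s3://operator-testing/demand-backup-2024-02-06-08:53:57-full
        storage:
          type: s3
          s3:
            bucket: operator-testing
            credentialsSecret: minio-secret
            endpointUrl: http://minio-service:9000
            region: us-east-1

Restoring from an inline backupSource like this exercises the path where only the uploaded artifact exists, independent of any backup object.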
logger.go:42: 09:05:45 | demand-backup/13-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:05:45 | demand-backup/13-read-data | ++++ pwd logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/tests/demand-backup logger.go:42: 09:05:45 | demand-backup/13-read-data | ++ test_name=demand-backup logger.go:42: 09:05:45 | demand-backup/13-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/vars.sh logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:05:45 | demand-backup/13-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export GIT_BRANCH=PR-523 logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ GIT_BRANCH=PR-523 logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export VERSION=PR-523-f00253e logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ VERSION=PR-523-f00253e logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:05:45 | demand-backup/13-read-data | ++++ which gdate logger.go:42: 09:05:45 | demand-backup/13-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-523/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 09:05:45 | demand-backup/13-read-data | ++++ which date logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ date=/usr/bin/date logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ command -v oc logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ kubectl get nodes logger.go:42: 09:05:45 | demand-backup/13-read-data | +++ grep '^minikube' logger.go:42: 09:05:46 | demand-backup/13-read-data | ++ get_cluster_name logger.go:42: 09:05:46 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 09:05:46 | demand-backup/13-read-data | + cluster_name=demand-backup logger.go:42: 09:05:46 | demand-backup/13-read-data | + for i in 0 1 2 logger.go:42: 09:05:46 | demand-backup/13-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:05:46 | demand-backup/13-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:05:46 | demand-backup/13-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:05:46 | demand-backup/13-read-data | ++ local pod= logger.go:42: 09:05:46 | demand-backup/13-read-data | +++ get_client_pod logger.go:42: 09:05:46 | demand-backup/13-read-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:05:47 | demand-backup/13-read-data | ++ client_pod=mysql-client logger.go:42: 09:05:47 | demand-backup/13-read-data | ++ wait_pod mysql-client logger.go:42: 09:05:47 | demand-backup/13-read-data | ++ local pod=mysql-client logger.go:42: 09:05:47 | demand-backup/13-read-data | ++ set +o xtrace logger.go:42: 09:05:47 | demand-backup/13-read-data | mysql-clienttrue logger.go:42: 09:05:47 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:05:47 | demand-backup/13-read-data | ++ sed -e 's/mysql: //'
logger.go:42: 09:05:47 | demand-backup/13-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 09:05:48 | demand-backup/13-read-data | + data=100500 logger.go:42: 09:05:48 | demand-backup/13-read-data | + kubectl create configmap -n kuttl-test-awake-shepherd 09-read-data-minio-backup-source-0 --from-literal=data=100500 logger.go:42: 09:05:49 | demand-backup/13-read-data | configmap/09-read-data-minio-backup-source-0 created logger.go:42: 09:05:49 | demand-backup/13-read-data | + for i in 0 1 2 logger.go:42: 09:05:49 | demand-backup/13-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:05:49 | demand-backup/13-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:05:49 | demand-backup/13-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:05:49 | demand-backup/13-read-data | ++ local pod= logger.go:42: 09:05:49 | demand-backup/13-read-data | +++ get_client_pod logger.go:42: 09:05:49 | demand-backup/13-read-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:05:49 | demand-backup/13-read-data | ++ client_pod=mysql-client logger.go:42: 09:05:49 | demand-backup/13-read-data | ++ wait_pod mysql-client logger.go:42: 09:05:49 | demand-backup/13-read-data | ++ local pod=mysql-client logger.go:42: 09:05:49 | demand-backup/13-read-data | ++ set +o xtrace logger.go:42: 09:05:49 | demand-backup/13-read-data | mysql-clienttrue logger.go:42: 09:05:49 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:05:49 | demand-backup/13-read-data | ++ sed -e 's/mysql: //' logger.go:42: 09:05:49 | demand-backup/13-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 09:05:50 | demand-backup/13-read-data | + data=100500 logger.go:42: 09:05:50 | demand-backup/13-read-data | + kubectl create configmap -n kuttl-test-awake-shepherd 09-read-data-minio-backup-source-1 --from-literal=data=100500 logger.go:42: 09:05:51 | demand-backup/13-read-data | configmap/09-read-data-minio-backup-source-1 created logger.go:42: 09:05:51 | demand-backup/13-read-data | + for i in 0 1 2 logger.go:42: 09:05:51 | demand-backup/13-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:05:51 | demand-backup/13-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:05:51 | demand-backup/13-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:05:51 | demand-backup/13-read-data | ++ local pod= logger.go:42: 09:05:51 | demand-backup/13-read-data | +++ get_client_pod logger.go:42: 09:05:51 | demand-backup/13-read-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:05:51 | demand-backup/13-read-data | ++ client_pod=mysql-client logger.go:42: 09:05:51 | demand-backup/13-read-data | ++ wait_pod mysql-client logger.go:42: 09:05:51 | demand-backup/13-read-data | ++ local pod=mysql-client logger.go:42: 09:05:51 | demand-backup/13-read-data | ++ set +o xtrace logger.go:42: 09:05:51 | demand-backup/13-read-data | mysql-clienttrue logger.go:42: 09:05:51 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:05:51 | demand-backup/13-read-data | ++ sed -e 's/mysql: //' logger.go:42: 09:05:51 | demand-backup/13-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 09:05:53 | demand-backup/13-read-data | + data=100500 logger.go:42: 09:05:53 | demand-backup/13-read-data | + kubectl create configmap -n kuttl-test-awake-shepherd 09-read-data-minio-backup-source-2 --from-literal=data=100500 logger.go:42: 09:05:53 | demand-backup/13-read-data | configmap/09-read-data-minio-backup-source-2 created logger.go:42: 09:05:55 | demand-backup/13-read-data | test step completed 13-read-data logger.go:42: 09:05:55 | demand-backup/14-create-backup-s3 | starting test step 14-create-backup-s3 logger.go:42: 09:05:56 | demand-backup/14-create-backup-s3 | PerconaServerMySQLBackup:kuttl-test-awake-shepherd/demand-backup-s3 created logger.go:42: 09:06:06 | demand-backup/14-create-backup-s3 | test step completed 14-create-backup-s3 logger.go:42: 09:06:06 | demand-backup/15-delete-data | starting test step 15-delete-data logger.go:42: 09:06:06 | demand-backup/15-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 08-delete-data-s3-${i} --from-literal=data="${data}" done] logger.go:42: 09:06:06 | demand-backup/15-delete-data | + source ../../functions logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ realpath ../../.. 
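Step 14 produced no shell trace because kuttl applies the step's manifest directly rather than running a script. Judging by the resource name in the log and the fields used elsewhere in this test, the applied object is presumably along these lines (the storageName value and the apiVersion are assumptions, not read from the log; the s3 storage itself would be defined in the cluster's backup storages):

    # Hypothetical content of the 14-create-backup-s3 step manifest.
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLBackup
    metadata:
      name: demand-backup-s3
    spec:
      clusterName: demand-backup
      storageName: s3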
logger.go:42: 09:06:06 | demand-backup/15-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:06:06 | demand-backup/15-delete-data | ++++ pwd logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/tests/demand-backup logger.go:42: 09:06:06 | demand-backup/15-delete-data | ++ test_name=demand-backup logger.go:42: 09:06:06 | demand-backup/15-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/vars.sh logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:06:06 | demand-backup/15-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export GIT_BRANCH=PR-523 logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ GIT_BRANCH=PR-523 logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export VERSION=PR-523-f00253e logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ VERSION=PR-523-f00253e logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:06:06 | demand-backup/15-delete-data | ++++ which gdate logger.go:42: 09:06:06 | demand-backup/15-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-523/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 09:06:06 | demand-backup/15-delete-data | ++++ which date logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ date=/usr/bin/date logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ command -v oc logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ kubectl get nodes logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ grep '^minikube' logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ get_cluster_name logger.go:42: 09:06:06 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 09:06:07 | demand-backup/15-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 09:06:07 | demand-backup/15-delete-data | ++ local cluster=demand-backup logger.go:42: 09:06:07 | demand-backup/15-delete-data | ++ echo demand-backup-haproxy logger.go:42: 09:06:07 | demand-backup/15-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 09:06:07 | demand-backup/15-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 09:06:07 | demand-backup/15-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 09:06:07 | demand-backup/15-delete-data | + local pod= logger.go:42: 09:06:07 | demand-backup/15-delete-data | ++ get_client_pod logger.go:42: 09:06:07 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:06:07 | demand-backup/15-delete-data | + client_pod=mysql-client logger.go:42: 09:06:07 | demand-backup/15-delete-data | + wait_pod mysql-client logger.go:42: 09:06:07 | demand-backup/15-delete-data | + local pod=mysql-client logger.go:42: 09:06:07 | demand-backup/15-delete-data | + set +o xtrace logger.go:42: 09:06:08 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 09:06:08 | demand-backup/15-delete-data | + sed -e 's/mysql: //' logger.go:42: 09:06:08 | demand-backup/15-delete-data | + grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 09:06:08 | demand-backup/15-delete-data | + kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 09:06:09 | demand-backup/15-delete-data | + : logger.go:42: 09:06:09 | demand-backup/15-delete-data | ++ get_cluster_name logger.go:42: 09:06:09 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 09:06:09 | demand-backup/15-delete-data | + cluster_name=demand-backup logger.go:42: 09:06:09 | demand-backup/15-delete-data | + for i in 0 1 2 logger.go:42: 09:06:09 | demand-backup/15-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:06:09 | demand-backup/15-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:06:09 | demand-backup/15-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:06:09 | demand-backup/15-delete-data | ++ local pod= logger.go:42: 09:06:09 | demand-backup/15-delete-data | +++ get_client_pod logger.go:42: 09:06:09 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:06:10 | demand-backup/15-delete-data | ++ client_pod=mysql-client logger.go:42: 09:06:10 | demand-backup/15-delete-data | ++ wait_pod mysql-client logger.go:42: 09:06:10 | demand-backup/15-delete-data | ++ local pod=mysql-client logger.go:42: 09:06:10 | demand-backup/15-delete-data | ++ set +o xtrace logger.go:42: 09:06:10 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 09:06:10 | demand-backup/15-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 09:06:10 | demand-backup/15-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
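The TRUNCATE above is deliberately sent through the HAProxy service rather than to an individual pod, so it lands on the current primary. The two helpers that build that address are fully visible in the trace and reduce to:

    # Reconstructed from the xtrace records above.
    get_cluster_name() {
        kubectl -n "${NAMESPACE}" get ps \
            -o 'jsonpath={.items[0].metadata.name}'
    }

    get_haproxy_svc() {
        local cluster="$1"
        echo "${cluster}-haproxy"
    }

The per-pod SELECTs that follow then confirm the truncate replicated to all three members, not just the node that executed it.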
logger.go:42: 09:06:10 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:06:11 | demand-backup/15-delete-data | ++ : logger.go:42: 09:06:11 | demand-backup/15-delete-data | + data= logger.go:42: 09:06:11 | demand-backup/15-delete-data | + kubectl create configmap -n kuttl-test-awake-shepherd 08-delete-data-s3-0 --from-literal=data= logger.go:42: 09:06:11 | demand-backup/15-delete-data | configmap/08-delete-data-s3-0 created logger.go:42: 09:06:11 | demand-backup/15-delete-data | + for i in 0 1 2 logger.go:42: 09:06:11 | demand-backup/15-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:06:11 | demand-backup/15-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:06:11 | demand-backup/15-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:06:11 | demand-backup/15-delete-data | ++ local pod= logger.go:42: 09:06:11 | demand-backup/15-delete-data | +++ get_client_pod logger.go:42: 09:06:11 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:06:12 | demand-backup/15-delete-data | ++ client_pod=mysql-client logger.go:42: 09:06:12 | demand-backup/15-delete-data | ++ wait_pod mysql-client logger.go:42: 09:06:12 | demand-backup/15-delete-data | ++ local pod=mysql-client logger.go:42: 09:06:12 | demand-backup/15-delete-data | ++ set +o xtrace logger.go:42: 09:06:12 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 09:06:12 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:06:12 | demand-backup/15-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 09:06:12 | demand-backup/15-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 09:06:13 | demand-backup/15-delete-data | ++ : logger.go:42: 09:06:13 | demand-backup/15-delete-data | + data= logger.go:42: 09:06:13 | demand-backup/15-delete-data | + kubectl create configmap -n kuttl-test-awake-shepherd 08-delete-data-s3-1 --from-literal=data= logger.go:42: 09:06:14 | demand-backup/15-delete-data | configmap/08-delete-data-s3-1 created logger.go:42: 09:06:14 | demand-backup/15-delete-data | + for i in 0 1 2 logger.go:42: 09:06:14 | demand-backup/15-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:06:14 | demand-backup/15-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:06:14 | demand-backup/15-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:06:14 | demand-backup/15-delete-data | ++ local pod= logger.go:42: 09:06:14 | demand-backup/15-delete-data | +++ get_client_pod logger.go:42: 09:06:14 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:06:14 | demand-backup/15-delete-data | ++ client_pod=mysql-client logger.go:42: 09:06:14 | demand-backup/15-delete-data | ++ wait_pod mysql-client logger.go:42: 09:06:14 | demand-backup/15-delete-data | ++ local pod=mysql-client logger.go:42: 09:06:14 | demand-backup/15-delete-data | ++ set +o xtrace logger.go:42: 09:06:14 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 09:06:14 | demand-backup/15-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 09:06:14 | demand-backup/15-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 09:06:14 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:06:16 | demand-backup/15-delete-data | ++ : logger.go:42: 09:06:16 | demand-backup/15-delete-data | + data= logger.go:42: 09:06:16 | demand-backup/15-delete-data | + kubectl create configmap -n kuttl-test-awake-shepherd 08-delete-data-s3-2 --from-literal=data= logger.go:42: 09:06:16 | demand-backup/15-delete-data | configmap/08-delete-data-s3-2 created logger.go:42: 09:06:17 | demand-backup/15-delete-data | test step completed 15-delete-data logger.go:42: 09:06:17 | demand-backup/16-restore-from-s3 | starting test step 16-restore-from-s3 logger.go:42: 09:06:18 | demand-backup/16-restore-from-s3 | PerconaServerMySQLRestore:kuttl-test-awake-shepherd/demand-backup-restore-s3 created logger.go:42: 09:11:13 | demand-backup/16-restore-from-s3 | test step completed 16-restore-from-s3 logger.go:42: 09:11:13 | demand-backup/17-read-data | starting test step 17-read-data logger.go:42: 09:11:13 | demand-backup/17-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-read-data-s3-${i} --from-literal=data="${data}" done] logger.go:42: 09:11:13 | demand-backup/17-read-data | + source ../../functions logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ realpath ../../.. 
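Step 16 is again a plain manifest step, but unlike step 12 it restores by pointing at the finished backup object; that is exactly the spec.backupName field the earlier yq pipeline had to delete. A hypothetical equivalent of the applied manifest (apiVersion assumed as before):

    # Hypothetical restore-by-name manifest for step 16; contrast with the
    # inline backupSource used in step 12.
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLRestore
    metadata:
      name: demand-backup-restore-s3
    spec:
      clusterName: demand-backup
      backupName: demand-backup-s3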
logger.go:42: 09:11:13 | demand-backup/17-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:11:13 | demand-backup/17-read-data | ++++ pwd logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/tests/demand-backup logger.go:42: 09:11:13 | demand-backup/17-read-data | ++ test_name=demand-backup logger.go:42: 09:11:13 | demand-backup/17-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/vars.sh logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:11:13 | demand-backup/17-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export GIT_BRANCH=PR-523 logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ GIT_BRANCH=PR-523 logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export VERSION=PR-523-f00253e logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ VERSION=PR-523-f00253e logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:11:13 | demand-backup/17-read-data | ++++ which gdate logger.go:42: 09:11:13 | demand-backup/17-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-523/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 09:11:13 | demand-backup/17-read-data | ++++ which date logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ date=/usr/bin/date logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ command -v oc logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ kubectl get nodes logger.go:42: 09:11:13 | demand-backup/17-read-data | +++ grep '^minikube' logger.go:42: 09:11:13 | demand-backup/17-read-data | ++ get_cluster_name logger.go:42: 09:11:13 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 09:11:14 | demand-backup/17-read-data | + cluster_name=demand-backup logger.go:42: 09:11:14 | demand-backup/17-read-data | + for i in 0 1 2 logger.go:42: 09:11:14 | demand-backup/17-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:14 | demand-backup/17-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:11:14 | demand-backup/17-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:14 | demand-backup/17-read-data | ++ local pod= logger.go:42: 09:11:14 | demand-backup/17-read-data | +++ get_client_pod logger.go:42: 09:11:14 | demand-backup/17-read-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:11:14 | demand-backup/17-read-data | ++ client_pod=mysql-client logger.go:42: 09:11:14 | demand-backup/17-read-data | ++ wait_pod mysql-client logger.go:42: 09:11:14 | demand-backup/17-read-data | ++ local pod=mysql-client logger.go:42: 09:11:14 | demand-backup/17-read-data | ++ set +o xtrace logger.go:42: 09:11:14 | demand-backup/17-read-data | mysql-clienttrue logger.go:42: 09:11:14 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:14 | demand-backup/17-read-data | ++ sed -e 's/mysql: //'
logger.go:42: 09:11:14 | demand-backup/17-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 09:11:15 | demand-backup/17-read-data | + data=100500 logger.go:42: 09:11:15 | demand-backup/17-read-data | + kubectl create configmap -n kuttl-test-awake-shepherd 06-read-data-s3-0 --from-literal=data=100500 logger.go:42: 09:11:16 | demand-backup/17-read-data | configmap/06-read-data-s3-0 created logger.go:42: 09:11:16 | demand-backup/17-read-data | + for i in 0 1 2 logger.go:42: 09:11:16 | demand-backup/17-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:16 | demand-backup/17-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:11:16 | demand-backup/17-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:16 | demand-backup/17-read-data | ++ local pod= logger.go:42: 09:11:16 | demand-backup/17-read-data | +++ get_client_pod logger.go:42: 09:11:16 | demand-backup/17-read-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:11:16 | demand-backup/17-read-data | ++ client_pod=mysql-client logger.go:42: 09:11:16 | demand-backup/17-read-data | ++ wait_pod mysql-client logger.go:42: 09:11:16 | demand-backup/17-read-data | ++ local pod=mysql-client logger.go:42: 09:11:16 | demand-backup/17-read-data | ++ set +o xtrace logger.go:42: 09:11:17 | demand-backup/17-read-data | mysql-clienttrue logger.go:42: 09:11:17 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:17 | demand-backup/17-read-data | ++ sed -e 's/mysql: //' logger.go:42: 09:11:17 | demand-backup/17-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 09:11:18 | demand-backup/17-read-data | + data=100500 logger.go:42: 09:11:18 | demand-backup/17-read-data | + kubectl create configmap -n kuttl-test-awake-shepherd 06-read-data-s3-1 --from-literal=data=100500 logger.go:42: 09:11:18 | demand-backup/17-read-data | configmap/06-read-data-s3-1 created logger.go:42: 09:11:18 | demand-backup/17-read-data | + for i in 0 1 2 logger.go:42: 09:11:18 | demand-backup/17-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:18 | demand-backup/17-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:11:18 | demand-backup/17-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:18 | demand-backup/17-read-data | ++ local pod= logger.go:42: 09:11:18 | demand-backup/17-read-data | +++ get_client_pod logger.go:42: 09:11:18 | demand-backup/17-read-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:11:18 | demand-backup/17-read-data | ++ client_pod=mysql-client logger.go:42: 09:11:18 | demand-backup/17-read-data | ++ wait_pod mysql-client logger.go:42: 09:11:18 | demand-backup/17-read-data | ++ local pod=mysql-client logger.go:42: 09:11:18 | demand-backup/17-read-data | ++ set +o xtrace logger.go:42: 09:11:19 | demand-backup/17-read-data | mysql-clienttrue logger.go:42: 09:11:19 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:19 | demand-backup/17-read-data | ++ sed -e 's/mysql: //' logger.go:42: 09:11:19 | demand-backup/17-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 09:11:20 | demand-backup/17-read-data | + data=100500 logger.go:42: 09:11:20 | demand-backup/17-read-data | + kubectl create configmap -n kuttl-test-awake-shepherd 06-read-data-s3-2 --from-literal=data=100500 logger.go:42: 09:11:20 | demand-backup/17-read-data | configmap/06-read-data-s3-2 created logger.go:42: 09:11:22 | demand-backup/17-read-data | test step completed 17-read-data logger.go:42: 09:11:22 | demand-backup/18-create-backup-gcp | starting test step 18-create-backup-gcp logger.go:42: 09:11:23 | demand-backup/18-create-backup-gcp | PerconaServerMySQLBackup:kuttl-test-awake-shepherd/demand-backup-gcp created logger.go:42: 09:11:39 | demand-backup/18-create-backup-gcp | test step completed 18-create-backup-gcp logger.go:42: 09:11:39 | demand-backup/19-delete-data | starting test step 19-delete-data logger.go:42: 09:11:39 | demand-backup/19-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 12-delete-data-gcp-${i} --from-literal=data="${data}" done] logger.go:42: 09:11:39 | demand-backup/19-delete-data | + source ../../functions logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ realpath ../../.. 
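None of the read-data scripts inspect the ConfigMaps they create; kuttl does that by matching them against an assert file in the test step's directory. A hypothetical assert for step 17, following kuttl's NN-assert.yaml naming convention (the actual file is not shown in this log):

    # Hypothetical 17-assert.yaml: kuttl waits until objects matching these
    # specs exist in the test namespace, then marks the step passed.
    apiVersion: v1
    kind: ConfigMap
    metadata:
      name: 06-read-data-s3-0
    data:
      data: "100500"
    # ...and likewise for 06-read-data-s3-1 and 06-read-data-s3-2.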
logger.go:42: 09:11:39 | demand-backup/19-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:11:39 | demand-backup/19-delete-data | ++++ pwd logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/tests/demand-backup logger.go:42: 09:11:39 | demand-backup/19-delete-data | ++ test_name=demand-backup logger.go:42: 09:11:39 | demand-backup/19-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/vars.sh logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:11:39 | demand-backup/19-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export GIT_BRANCH=PR-523 logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ GIT_BRANCH=PR-523 logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export VERSION=PR-523-f00253e logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ VERSION=PR-523-f00253e logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 
09:11:39 | demand-backup/19-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:11:39 | demand-backup/19-delete-data | ++++ which gdate logger.go:42: 09:11:39 | demand-backup/19-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-523/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 09:11:39 | demand-backup/19-delete-data | ++++ which date logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ date=/usr/bin/date logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ command -v oc logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ kubectl get nodes logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ grep '^minikube' logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ get_cluster_name logger.go:42: 09:11:39 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 09:11:40 | demand-backup/19-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 09:11:40 | demand-backup/19-delete-data | ++ local cluster=demand-backup logger.go:42: 09:11:40 | demand-backup/19-delete-data | ++ echo demand-backup-haproxy logger.go:42: 09:11:40 | demand-backup/19-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 09:11:40 | demand-backup/19-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 09:11:40 | demand-backup/19-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 09:11:40 | demand-backup/19-delete-data | + local pod= logger.go:42: 09:11:40 | demand-backup/19-delete-data | ++ get_client_pod logger.go:42: 09:11:40 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:11:40 | demand-backup/19-delete-data | + client_pod=mysql-client logger.go:42: 09:11:40 | demand-backup/19-delete-data | + wait_pod mysql-client logger.go:42: 09:11:40 | demand-backup/19-delete-data | + local pod=mysql-client logger.go:42: 09:11:40 | demand-backup/19-delete-data | + set +o xtrace logger.go:42: 09:11:40 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 09:11:40 | demand-backup/19-delete-data | + kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot 
-proot_password' logger.go:42: 09:11:40 | demand-backup/19-delete-data | + sed -e 's/mysql: //' logger.go:42: 09:11:40 | demand-backup/19-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 09:11:41 | demand-backup/19-delete-data | + : logger.go:42: 09:11:41 | demand-backup/19-delete-data | ++ get_cluster_name logger.go:42: 09:11:41 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 09:11:42 | demand-backup/19-delete-data | + cluster_name=demand-backup logger.go:42: 09:11:42 | demand-backup/19-delete-data | + for i in 0 1 2 logger.go:42: 09:11:42 | demand-backup/19-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:42 | demand-backup/19-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:11:42 | demand-backup/19-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:42 | demand-backup/19-delete-data | ++ local pod= logger.go:42: 09:11:42 | demand-backup/19-delete-data | +++ get_client_pod logger.go:42: 09:11:42 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:11:42 | demand-backup/19-delete-data | ++ client_pod=mysql-client logger.go:42: 09:11:42 | demand-backup/19-delete-data | ++ wait_pod mysql-client logger.go:42: 09:11:42 | demand-backup/19-delete-data | ++ local pod=mysql-client logger.go:42: 09:11:42 | demand-backup/19-delete-data | ++ set +o xtrace logger.go:42: 09:11:43 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 09:11:43 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:43 | demand-backup/19-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 09:11:43 | demand-backup/19-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
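Note: the pipeline in the trace above is the run_mysql helper from e2e-tests/functions: it pushes a statement into the long-lived mysql-client pod and strips the mysql client's password warning from the result. A minimal sketch of the helper as it behaves here (the bare `:` that follows in the trace suggests a no-op fallback such as `|| :` so an empty result set does not trip errexit; that detail is an assumption):

    run_mysql() {
        local command="$1"
        local uri="$2"
        # Locate the long-lived client pod deployed back in step 1-deploy-operator.
        local client_pod
        client_pod=$(kubectl -n "${NAMESPACE}" get pods --selector=name=mysql-client \
            -o 'jsonpath={.items[].metadata.name}')
        wait_pod "${client_pod}"
        # -sN suppresses column headers; sed/grep remove the warning the mysql
        # client emits because the password is passed on the command line.
        kubectl -n "${NAMESPACE}" exec "${client_pod}" -- \
            bash -c "printf '%s\n' \"${command}\" | mysql -sN ${uri}" \
            | sed -e 's/mysql: //' \
            | grep -v 'Using a password on the command line interface can be insecure.' \
            || :
    }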
logger.go:42: 09:11:44 | demand-backup/19-delete-data | ++ : logger.go:42: 09:11:44 | demand-backup/19-delete-data | + data= logger.go:42: 09:11:44 | demand-backup/19-delete-data | + kubectl create configmap -n kuttl-test-awake-shepherd 12-delete-data-gcp-0 --from-literal=data= logger.go:42: 09:11:44 | demand-backup/19-delete-data | configmap/12-delete-data-gcp-0 created logger.go:42: 09:11:44 | demand-backup/19-delete-data | + for i in 0 1 2 logger.go:42: 09:11:44 | demand-backup/19-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:44 | demand-backup/19-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:11:44 | demand-backup/19-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:44 | demand-backup/19-delete-data | ++ local pod= logger.go:42: 09:11:44 | demand-backup/19-delete-data | +++ get_client_pod logger.go:42: 09:11:44 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:11:45 | demand-backup/19-delete-data | ++ client_pod=mysql-client logger.go:42: 09:11:45 | demand-backup/19-delete-data | ++ wait_pod mysql-client logger.go:42: 09:11:45 | demand-backup/19-delete-data | ++ local pod=mysql-client logger.go:42: 09:11:45 | demand-backup/19-delete-data | ++ set +o xtrace logger.go:42: 09:11:45 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 09:11:45 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:45 | demand-backup/19-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 09:11:45 | demand-backup/19-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
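Note: each replica's SELECT output is persisted into a ConfigMap (12-delete-data-gcp-0 above) so the step's kuttl assert can compare it; after the TRUNCATE the expected payload is the empty string, hence --from-literal=data= in the trace. The loop comes straight from the step script; the assert sketched after it is hypothetical, based on common kuttl conventions rather than this repo's actual file:

    cluster_name=$(get_cluster_name)
    for i in 0 1 2; do
        data=$(run_mysql "SELECT * FROM myDB.myTable" \
            "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password")
        kubectl create configmap -n "${NAMESPACE}" 12-delete-data-gcp-${i} \
            --from-literal=data="${data}"
    done

    # Hypothetical kuttl assert for one of the ConfigMaps (file name assumed):
    cat <<'EOF' >19-assert.yaml
    apiVersion: v1
    kind: ConfigMap
    metadata:
      name: 12-delete-data-gcp-0
    data:
      data: ""
    EOF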
logger.go:42: 09:11:46 | demand-backup/19-delete-data | ++ : logger.go:42: 09:11:46 | demand-backup/19-delete-data | + data= logger.go:42: 09:11:46 | demand-backup/19-delete-data | + kubectl create configmap -n kuttl-test-awake-shepherd 12-delete-data-gcp-1 --from-literal=data= logger.go:42: 09:11:46 | demand-backup/19-delete-data | configmap/12-delete-data-gcp-1 created logger.go:42: 09:11:46 | demand-backup/19-delete-data | + for i in 0 1 2 logger.go:42: 09:11:46 | demand-backup/19-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:46 | demand-backup/19-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:11:46 | demand-backup/19-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:46 | demand-backup/19-delete-data | ++ local pod= logger.go:42: 09:11:46 | demand-backup/19-delete-data | +++ get_client_pod logger.go:42: 09:11:46 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:11:47 | demand-backup/19-delete-data | ++ client_pod=mysql-client logger.go:42: 09:11:47 | demand-backup/19-delete-data | ++ wait_pod mysql-client logger.go:42: 09:11:47 | demand-backup/19-delete-data | ++ local pod=mysql-client logger.go:42: 09:11:47 | demand-backup/19-delete-data | ++ set +o xtrace logger.go:42: 09:11:47 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 09:11:47 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:11:47 | demand-backup/19-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 09:11:47 | demand-backup/19-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 09:11:48 | demand-backup/19-delete-data | ++ : logger.go:42: 09:11:48 | demand-backup/19-delete-data | + data= logger.go:42: 09:11:48 | demand-backup/19-delete-data | + kubectl create configmap -n kuttl-test-awake-shepherd 12-delete-data-gcp-2 --from-literal=data= logger.go:42: 09:11:49 | demand-backup/19-delete-data | configmap/12-delete-data-gcp-2 created logger.go:42: 09:11:50 | demand-backup/19-delete-data | test step completed 19-delete-data logger.go:42: 09:11:50 | demand-backup/20-restore-from-gcp | starting test step 20-restore-from-gcp logger.go:42: 09:11:51 | demand-backup/20-restore-from-gcp | PerconaServerMySQLRestore:kuttl-test-awake-shepherd/demand-backup-restore-gcp created logger.go:42: 09:16:53 | demand-backup/20-restore-from-gcp | test step completed 20-restore-from-gcp logger.go:42: 09:16:53 | demand-backup/21-read-data | starting test step 21-read-data logger.go:42: 09:16:53 | demand-backup/21-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-read-data-gcp-${i} --from-literal=data="${data}" done] logger.go:42: 09:16:53 | demand-backup/21-read-data | + source ../../functions logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ realpath ../../.. 
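Note: step 20-restore-from-gcp, which completed above after roughly five minutes, consists of applying a PerconaServerMySQLRestore and letting kuttl poll until the operator reports success. A sketch of the resource the step creates; the apiVersion and spec field names follow this operator's CRDs from memory and are not quoted from this log:

    kubectl -n "${NAMESPACE}" apply -f - <<EOF
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLRestore
    metadata:
      name: demand-backup-restore-gcp
    spec:
      clusterName: demand-backup
      backupName: demand-backup-gcp
    EOF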
logger.go:42: 09:16:53 | demand-backup/21-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:16:53 | demand-backup/21-read-data | ++++ pwd logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/tests/demand-backup logger.go:42: 09:16:53 | demand-backup/21-read-data | ++ test_name=demand-backup logger.go:42: 09:16:53 | demand-backup/21-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/vars.sh logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:16:53 | demand-backup/21-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export GIT_BRANCH=PR-523 logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ GIT_BRANCH=PR-523 logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export VERSION=PR-523-f00253e logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ VERSION=PR-523-f00253e logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:16:53 | demand-backup/21-read-data | ++++ which gdate logger.go:42: 09:16:53 | demand-backup/21-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-523/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 09:16:53 | demand-backup/21-read-data | ++++ which date logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ date=/usr/bin/date logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ command -v oc logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ kubectl get nodes logger.go:42: 09:16:53 | demand-backup/21-read-data | +++ grep '^minikube' logger.go:42: 09:16:54 | demand-backup/21-read-data | ++ get_cluster_name logger.go:42: 09:16:54 | demand-backup/21-read-data | ++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 09:16:54 | demand-backup/21-read-data | + cluster_name=demand-backup logger.go:42: 09:16:54 | demand-backup/21-read-data | + for i in 0 1 2 logger.go:42: 09:16:54 | demand-backup/21-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:16:54 | demand-backup/21-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:16:54 | demand-backup/21-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:16:54 | demand-backup/21-read-data | ++ local pod= logger.go:42: 09:16:54 | demand-backup/21-read-data | +++ get_client_pod logger.go:42: 09:16:54 | demand-backup/21-read-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:16:55 | demand-backup/21-read-data | ++ client_pod=mysql-client logger.go:42: 09:16:55 | demand-backup/21-read-data | ++ wait_pod mysql-client logger.go:42: 09:16:55 | demand-backup/21-read-data | ++ local pod=mysql-client logger.go:42: 09:16:55 | demand-backup/21-read-data | ++ set +o xtrace logger.go:42: 09:16:55 | demand-backup/21-read-data | mysql-clienttrue logger.go:42: 09:16:55 | demand-backup/21-read-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:16:55 | demand-backup/21-read-data | ++ sed -e 's/mysql: //' logger.go:42: 09:16:55 | demand-backup/21-read-data | ++ grep -v 
'Using a password on the command line interface can be insecure.' logger.go:42: 09:16:56 | demand-backup/21-read-data | + data=100500 logger.go:42: 09:16:56 | demand-backup/21-read-data | + kubectl create configmap -n kuttl-test-awake-shepherd 06-read-data-gcp-0 --from-literal=data=100500 logger.go:42: 09:16:56 | demand-backup/21-read-data | configmap/06-read-data-gcp-0 created logger.go:42: 09:16:56 | demand-backup/21-read-data | + for i in 0 1 2 logger.go:42: 09:16:56 | demand-backup/21-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:16:56 | demand-backup/21-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:16:56 | demand-backup/21-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:16:56 | demand-backup/21-read-data | ++ local pod= logger.go:42: 09:16:56 | demand-backup/21-read-data | +++ get_client_pod logger.go:42: 09:16:56 | demand-backup/21-read-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:16:57 | demand-backup/21-read-data | ++ client_pod=mysql-client logger.go:42: 09:16:57 | demand-backup/21-read-data | ++ wait_pod mysql-client logger.go:42: 09:16:57 | demand-backup/21-read-data | ++ local pod=mysql-client logger.go:42: 09:16:57 | demand-backup/21-read-data | ++ set +o xtrace logger.go:42: 09:16:57 | demand-backup/21-read-data | mysql-clienttrue logger.go:42: 09:16:57 | demand-backup/21-read-data | ++ sed -e 's/mysql: //' logger.go:42: 09:16:57 | demand-backup/21-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 09:16:57 | demand-backup/21-read-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:16:58 | demand-backup/21-read-data | + data=100500 logger.go:42: 09:16:58 | demand-backup/21-read-data | + kubectl create configmap -n kuttl-test-awake-shepherd 06-read-data-gcp-1 --from-literal=data=100500 logger.go:42: 09:16:59 | demand-backup/21-read-data | configmap/06-read-data-gcp-1 created logger.go:42: 09:16:59 | demand-backup/21-read-data | + for i in 0 1 2 logger.go:42: 09:16:59 | demand-backup/21-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:16:59 | demand-backup/21-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:16:59 | demand-backup/21-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:16:59 | demand-backup/21-read-data | ++ local pod= logger.go:42: 09:16:59 | demand-backup/21-read-data | +++ get_client_pod logger.go:42: 09:16:59 | demand-backup/21-read-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:16:59 | demand-backup/21-read-data | ++ client_pod=mysql-client logger.go:42: 09:16:59 | demand-backup/21-read-data | ++ wait_pod mysql-client logger.go:42: 09:16:59 | demand-backup/21-read-data | ++ local pod=mysql-client logger.go:42: 09:16:59 | demand-backup/21-read-data | ++ set +o xtrace logger.go:42: 09:17:00 | demand-backup/21-read-data | mysql-clienttrue logger.go:42: 09:17:00 | demand-backup/21-read-data | ++ 
kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:17:00 | demand-backup/21-read-data | ++ sed -e 's/mysql: //' logger.go:42: 09:17:00 | demand-backup/21-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 09:17:01 | demand-backup/21-read-data | + data=100500 logger.go:42: 09:17:01 | demand-backup/21-read-data | + kubectl create configmap -n kuttl-test-awake-shepherd 06-read-data-gcp-2 --from-literal=data=100500 logger.go:42: 09:17:01 | demand-backup/21-read-data | configmap/06-read-data-gcp-2 created logger.go:42: 09:17:02 | demand-backup/21-read-data | test step completed 21-read-data logger.go:42: 09:17:02 | demand-backup/22-create-backup-azure | starting test step 22-create-backup-azure logger.go:42: 09:17:04 | demand-backup/22-create-backup-azure | PerconaServerMySQLBackup:kuttl-test-awake-shepherd/demand-backup-azure created logger.go:42: 09:17:15 | demand-backup/22-create-backup-azure | test step completed 22-create-backup-azure logger.go:42: 09:17:15 | demand-backup/23-delete-data | starting test step 23-delete-data logger.go:42: 09:17:15 | demand-backup/23-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 16-delete-data-azure-${i} --from-literal=data="${data}" done] logger.go:42: 09:17:15 | demand-backup/23-delete-data | + source ../../functions logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ realpath ../../.. 
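Note: step 22-create-backup-azure above finishes in about ten seconds: it applies a PerconaServerMySQLBackup naming one of the storages configured in the cluster spec and waits for the backup job. A sketch under the same caveat; in particular the storageName value here is an invented placeholder, not taken from this log:

    kubectl -n "${NAMESPACE}" apply -f - <<EOF
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLBackup
    metadata:
      name: demand-backup-azure
    spec:
      clusterName: demand-backup
      storageName: azure-blob    # placeholder; must match a storages entry in the cluster spec
    EOF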
logger.go:42: 09:17:15 | demand-backup/23-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:17:15 | demand-backup/23-delete-data | ++++ pwd logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/tests/demand-backup logger.go:42: 09:17:15 | demand-backup/23-delete-data | ++ test_name=demand-backup logger.go:42: 09:17:15 | demand-backup/23-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/vars.sh logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:17:15 | demand-backup/23-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export GIT_BRANCH=PR-523 logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ GIT_BRANCH=PR-523 logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export VERSION=PR-523-f00253e logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ VERSION=PR-523-f00253e logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 
09:17:15 | demand-backup/23-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:17:15 | demand-backup/23-delete-data | ++++ which gdate logger.go:42: 09:17:15 | demand-backup/23-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-523/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 09:17:15 | demand-backup/23-delete-data | ++++ which date logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ date=/usr/bin/date logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ command -v oc logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ kubectl get nodes logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ grep '^minikube' logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ get_cluster_name logger.go:42: 09:17:15 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 09:17:16 | demand-backup/23-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 09:17:16 | demand-backup/23-delete-data | ++ local cluster=demand-backup logger.go:42: 09:17:16 | demand-backup/23-delete-data | ++ echo demand-backup-haproxy logger.go:42: 09:17:16 | demand-backup/23-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 09:17:16 | demand-backup/23-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 09:17:16 | demand-backup/23-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 09:17:16 | demand-backup/23-delete-data | + local pod= logger.go:42: 09:17:16 | demand-backup/23-delete-data | ++ get_client_pod logger.go:42: 09:17:16 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:17:16 | demand-backup/23-delete-data | + client_pod=mysql-client logger.go:42: 09:17:16 | demand-backup/23-delete-data | + wait_pod mysql-client logger.go:42: 09:17:16 | demand-backup/23-delete-data | + local pod=mysql-client logger.go:42: 09:17:16 | demand-backup/23-delete-data | + set +o xtrace logger.go:42: 09:17:16 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 09:17:16 | demand-backup/23-delete-data | + kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot 
-proot_password' logger.go:42: 09:17:16 | demand-backup/23-delete-data | + sed -e 's/mysql: //' logger.go:42: 09:17:16 | demand-backup/23-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 09:17:18 | demand-backup/23-delete-data | + : logger.go:42: 09:17:18 | demand-backup/23-delete-data | ++ get_cluster_name logger.go:42: 09:17:18 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 09:17:18 | demand-backup/23-delete-data | + cluster_name=demand-backup logger.go:42: 09:17:18 | demand-backup/23-delete-data | + for i in 0 1 2 logger.go:42: 09:17:18 | demand-backup/23-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:17:18 | demand-backup/23-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:17:18 | demand-backup/23-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:17:18 | demand-backup/23-delete-data | ++ local pod= logger.go:42: 09:17:18 | demand-backup/23-delete-data | +++ get_client_pod logger.go:42: 09:17:18 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:17:19 | demand-backup/23-delete-data | ++ client_pod=mysql-client logger.go:42: 09:17:19 | demand-backup/23-delete-data | ++ wait_pod mysql-client logger.go:42: 09:17:19 | demand-backup/23-delete-data | ++ local pod=mysql-client logger.go:42: 09:17:19 | demand-backup/23-delete-data | ++ set +o xtrace logger.go:42: 09:17:19 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 09:17:19 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:17:19 | demand-backup/23-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 09:17:19 | demand-backup/23-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
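Note the addressing pattern: the TRUNCATE goes through the demand-backup-haproxy service, but each verification read targets one replica directly via <pod>.<headless-service> DNS (demand-backup-mysql-0.demand-backup-mysql and so on), which is what lets the test prove the deletion replicated to all three StatefulSet members. The two helpers visible in the trace reduce to:

    get_cluster_name() {
        # Name of the first (and only) PerconaServerMySQL object in the namespace.
        kubectl -n "${NAMESPACE}" get ps -o 'jsonpath={.items[0].metadata.name}'
    }

    get_haproxy_svc() {
        local cluster=$1
        echo "${cluster}-haproxy"
    }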
logger.go:42: 09:17:20 | demand-backup/23-delete-data | ++ : logger.go:42: 09:17:20 | demand-backup/23-delete-data | + data= logger.go:42: 09:17:20 | demand-backup/23-delete-data | + kubectl create configmap -n kuttl-test-awake-shepherd 16-delete-data-azure-0 --from-literal=data= logger.go:42: 09:17:20 | demand-backup/23-delete-data | configmap/16-delete-data-azure-0 created logger.go:42: 09:17:20 | demand-backup/23-delete-data | + for i in 0 1 2 logger.go:42: 09:17:20 | demand-backup/23-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:17:20 | demand-backup/23-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:17:20 | demand-backup/23-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:17:20 | demand-backup/23-delete-data | ++ local pod= logger.go:42: 09:17:20 | demand-backup/23-delete-data | +++ get_client_pod logger.go:42: 09:17:20 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:17:21 | demand-backup/23-delete-data | ++ client_pod=mysql-client logger.go:42: 09:17:21 | demand-backup/23-delete-data | ++ wait_pod mysql-client logger.go:42: 09:17:21 | demand-backup/23-delete-data | ++ local pod=mysql-client logger.go:42: 09:17:21 | demand-backup/23-delete-data | ++ set +o xtrace logger.go:42: 09:17:21 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 09:17:21 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:17:21 | demand-backup/23-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 09:17:21 | demand-backup/23-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
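Note: the recurring mysql-clienttrue line is wait_pod echoing the pod name and the container's lowercase ready flag back to back; xtrace is switched off just beforehand, so only that echo reaches the log. A plausible reconstruction (the real helper lives in e2e-tests/functions and may differ in its retry policy):

    wait_pod() {
        local pod=$1
        set +o xtrace
        local status
        while true; do
            status=$(kubectl -n "${NAMESPACE}" get pod "${pod}" \
                -o 'jsonpath={.metadata.name}{.status.containerStatuses[0].ready}')
            echo "${status}"    # renders as e.g. "mysql-clienttrue"
            if [[ ${status} == "${pod}true" ]]; then
                break
            fi
            sleep 1
        done
    }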
logger.go:42: 09:17:22 | demand-backup/23-delete-data | ++ : logger.go:42: 09:17:22 | demand-backup/23-delete-data | + data= logger.go:42: 09:17:22 | demand-backup/23-delete-data | + kubectl create configmap -n kuttl-test-awake-shepherd 16-delete-data-azure-1 --from-literal=data= logger.go:42: 09:17:23 | demand-backup/23-delete-data | configmap/16-delete-data-azure-1 created logger.go:42: 09:17:23 | demand-backup/23-delete-data | + for i in 0 1 2 logger.go:42: 09:17:23 | demand-backup/23-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:17:23 | demand-backup/23-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:17:23 | demand-backup/23-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:17:23 | demand-backup/23-delete-data | ++ local pod= logger.go:42: 09:17:23 | demand-backup/23-delete-data | +++ get_client_pod logger.go:42: 09:17:23 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:17:23 | demand-backup/23-delete-data | ++ client_pod=mysql-client logger.go:42: 09:17:23 | demand-backup/23-delete-data | ++ wait_pod mysql-client logger.go:42: 09:17:23 | demand-backup/23-delete-data | ++ local pod=mysql-client logger.go:42: 09:17:23 | demand-backup/23-delete-data | ++ set +o xtrace logger.go:42: 09:17:23 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 09:17:23 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:17:23 | demand-backup/23-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 09:17:23 | demand-backup/23-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 09:17:25 | demand-backup/23-delete-data | ++ : logger.go:42: 09:17:25 | demand-backup/23-delete-data | + data= logger.go:42: 09:17:25 | demand-backup/23-delete-data | + kubectl create configmap -n kuttl-test-awake-shepherd 16-delete-data-azure-2 --from-literal=data= logger.go:42: 09:17:25 | demand-backup/23-delete-data | configmap/16-delete-data-azure-2 created logger.go:42: 09:17:26 | demand-backup/23-delete-data | test step completed 23-delete-data logger.go:42: 09:17:26 | demand-backup/24-restore-from-azure | starting test step 24-restore-from-azure logger.go:42: 09:17:28 | demand-backup/24-restore-from-azure | PerconaServerMySQLRestore:kuttl-test-awake-shepherd/demand-backup-restore-azure created logger.go:42: 09:22:30 | demand-backup/24-restore-from-azure | test step completed 24-restore-from-azure logger.go:42: 09:22:30 | demand-backup/25-read-data | starting test step 25-read-data logger.go:42: 09:22:30 | demand-backup/25-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-read-data-azure-${i} --from-literal=data="${data}" done] logger.go:42: 09:22:30 | demand-backup/25-read-data | + source ../../functions logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ realpath ../../.. 
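Note: between "PerconaServerMySQLRestore ... created" at 09:17:28 and "test step completed" at 09:22:30, kuttl is only polling the step's assert until the restore reaches its terminal state. Outside kuttl the same wait can be written directly; both the ps-restore short name (by analogy with the ps-backup name used in step 26) and the Succeeded state value are assumptions:

    kubectl -n "${NAMESPACE}" wait ps-restore/demand-backup-restore-azure \
        --for=jsonpath='{.status.state}'=Succeeded --timeout=600s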
logger.go:42: 09:22:30 | demand-backup/25-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:22:30 | demand-backup/25-read-data | ++++ pwd logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/tests/demand-backup logger.go:42: 09:22:30 | demand-backup/25-read-data | ++ test_name=demand-backup logger.go:42: 09:22:30 | demand-backup/25-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/vars.sh logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:22:30 | demand-backup/25-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export GIT_BRANCH=PR-523 logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ GIT_BRANCH=PR-523 logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export VERSION=PR-523-f00253e logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ VERSION=PR-523-f00253e logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:22:30 | demand-backup/25-read-data | ++++ which gdate logger.go:42: 09:22:30 | demand-backup/25-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-523/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 09:22:30 | demand-backup/25-read-data | ++++ which date logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ date=/usr/bin/date logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ command -v oc logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ kubectl get nodes logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ grep '^minikube' logger.go:42: 09:22:30 | demand-backup/25-read-data | ++ get_cluster_name logger.go:42: 09:22:30 | demand-backup/25-read-data | ++ kubectl -n kuttl-test-awake-shepherd get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 09:22:30 | demand-backup/25-read-data | + cluster_name=demand-backup logger.go:42: 09:22:30 | demand-backup/25-read-data | + for i in 0 1 2 logger.go:42: 09:22:30 | demand-backup/25-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:22:30 | demand-backup/25-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:22:30 | demand-backup/25-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:22:30 | demand-backup/25-read-data | ++ local pod= logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ get_client_pod logger.go:42: 09:22:30 | demand-backup/25-read-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:22:31 | demand-backup/25-read-data | ++ client_pod=mysql-client logger.go:42: 09:22:31 | demand-backup/25-read-data | ++ wait_pod mysql-client logger.go:42: 09:22:31 | demand-backup/25-read-data | ++ local pod=mysql-client logger.go:42: 09:22:31 | demand-backup/25-read-data | ++ set +o xtrace logger.go:42: 09:22:31 | demand-backup/25-read-data | mysql-clienttrue logger.go:42: 09:22:31 | demand-backup/25-read-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:22:31 | demand-backup/25-read-data | ++ sed -e 's/mysql: //' logger.go:42: 09:22:31 | demand-backup/25-read-data | ++ grep -v 
'Using a password on the command line interface can be insecure.' logger.go:42: 09:22:32 | demand-backup/25-read-data | + data=100500 logger.go:42: 09:22:32 | demand-backup/25-read-data | + kubectl create configmap -n kuttl-test-awake-shepherd 06-read-data-azure-0 --from-literal=data=100500 logger.go:42: 09:22:33 | demand-backup/25-read-data | configmap/06-read-data-azure-0 created logger.go:42: 09:22:33 | demand-backup/25-read-data | + for i in 0 1 2 logger.go:42: 09:22:33 | demand-backup/25-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:22:33 | demand-backup/25-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:22:33 | demand-backup/25-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:22:33 | demand-backup/25-read-data | ++ local pod= logger.go:42: 09:22:33 | demand-backup/25-read-data | +++ get_client_pod logger.go:42: 09:22:33 | demand-backup/25-read-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:22:33 | demand-backup/25-read-data | ++ client_pod=mysql-client logger.go:42: 09:22:33 | demand-backup/25-read-data | ++ wait_pod mysql-client logger.go:42: 09:22:33 | demand-backup/25-read-data | ++ local pod=mysql-client logger.go:42: 09:22:33 | demand-backup/25-read-data | ++ set +o xtrace logger.go:42: 09:22:33 | demand-backup/25-read-data | mysql-clienttrue logger.go:42: 09:22:33 | demand-backup/25-read-data | ++ sed -e 's/mysql: //' logger.go:42: 09:22:33 | demand-backup/25-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 09:22:33 | demand-backup/25-read-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:22:35 | demand-backup/25-read-data | + data=100500 logger.go:42: 09:22:35 | demand-backup/25-read-data | + kubectl create configmap -n kuttl-test-awake-shepherd 06-read-data-azure-1 --from-literal=data=100500 logger.go:42: 09:22:35 | demand-backup/25-read-data | configmap/06-read-data-azure-1 created logger.go:42: 09:22:35 | demand-backup/25-read-data | + for i in 0 1 2 logger.go:42: 09:22:35 | demand-backup/25-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:22:35 | demand-backup/25-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 09:22:35 | demand-backup/25-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:22:35 | demand-backup/25-read-data | ++ local pod= logger.go:42: 09:22:35 | demand-backup/25-read-data | +++ get_client_pod logger.go:42: 09:22:35 | demand-backup/25-read-data | +++ kubectl -n kuttl-test-awake-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 09:22:35 | demand-backup/25-read-data | ++ client_pod=mysql-client logger.go:42: 09:22:35 | demand-backup/25-read-data | ++ wait_pod mysql-client logger.go:42: 09:22:35 | demand-backup/25-read-data | ++ local pod=mysql-client logger.go:42: 09:22:35 | demand-backup/25-read-data | ++ set +o xtrace logger.go:42: 09:22:36 | demand-backup/25-read-data | mysql-clienttrue logger.go:42: 09:22:36 | 
demand-backup/25-read-data | ++ kubectl -n kuttl-test-awake-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 09:22:36 | demand-backup/25-read-data | ++ sed -e 's/mysql: //' logger.go:42: 09:22:36 | demand-backup/25-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 09:22:37 | demand-backup/25-read-data | + data=100500 logger.go:42: 09:22:37 | demand-backup/25-read-data | + kubectl create configmap -n kuttl-test-awake-shepherd 06-read-data-azure-2 --from-literal=data=100500 logger.go:42: 09:22:37 | demand-backup/25-read-data | configmap/06-read-data-azure-2 created logger.go:42: 09:22:39 | demand-backup/25-read-data | test step completed 25-read-data logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | starting test step 26-delete-all-backups logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | running command: [sh -c set -o errexit
set -o xtrace

source ../../functions

kubectl delete ps-backup --all -n "${NAMESPACE}"

backup_name_minio="demand-backup-minio"

accessKey="$(kubectl -n "${NAMESPACE}" get secret minio-secret -o jsonpath='{.data.AWS_ACCESS_KEY_ID}' | base64 -d)"
secretKey="$(kubectl -n "${NAMESPACE}" get secret minio-secret -o jsonpath='{.data.AWS_SECRET_ACCESS_KEY}' | base64 -d)"

backup_exists=$(
  kubectl run -n "${NAMESPACE}" -i --rm aws-cli --image=perconalab/awscli --restart=Never -- \
    /usr/bin/env AWS_ACCESS_KEY_ID="${accessKey}" AWS_SECRET_ACCESS_KEY="${secretKey}" AWS_DEFAULT_REGION=us-east-1 \
    /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls 'operator-testing/' | grep -c "${backup_name_minio}/" | cat
  exit "${PIPESTATUS[0]}"
)

if [[ 1 -eq $backup_exists ]]; then
  echo "Backup was not removed from bucket -- minio"
  exit 1
fi] logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | + source ../../functions logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ realpath ../../..
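Note: the bucket check above leans on a small bash idiom: grep -c prints 0 and exits 1 when nothing matches, which would abort the errexit'd command substitution even though zero matches is exactly the desired outcome. The trailing | cat keeps the pipeline's overall status at 0, and exit "${PIPESTATUS[0]}" re-raises only a failure of the first stage (the aws listing itself). A self-contained demonstration:

    set -o errexit
    backup_exists=$(
        # grep finds nothing: it prints 0 and exits 1; cat exits 0, so the
        # pipeline as a whole succeeds and errexit stays quiet.
        printf 'other-backup/\n' | grep -c 'demand-backup-minio/' | cat
        # PIPESTATUS[0] is printf's status here, so the substitution fails
        # only if the producer side of the pipeline failed.
        exit "${PIPESTATUS[0]}"
    )
    echo "backup_exists=${backup_exists}"    # prints: backup_exists=0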
logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | ++++ pwd logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/tests/demand-backup logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | ++ test_name=demand-backup logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/vars.sh logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-523 logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/deploy logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-523/e2e-tests/conf logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export GIT_BRANCH=PR-523 logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ GIT_BRANCH=PR-523 logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export VERSION=PR-523-f00253e logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ VERSION=PR-523-f00253e logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-523-f00253e logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export 
IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | ++++ which gdate logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-523/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | ++++ which date logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ date=/usr/bin/date logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ command -v oc logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ kubectl get nodes logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | +++ grep '^minikube' logger.go:42: 09:22:39 | demand-backup/26-delete-all-backups | + kubectl delete ps-backup --all -n kuttl-test-awake-shepherd logger.go:42: 09:22:40 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-azure" deleted logger.go:42: 09:22:40 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-gcp" deleted logger.go:42: 09:22:40 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-minio" deleted logger.go:42: 09:22:40 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-s3" deleted logger.go:42: 09:22:43 | demand-backup/26-delete-all-backups | + backup_name_minio=demand-backup-minio logger.go:42: 09:22:43 | demand-backup/26-delete-all-backups | ++ kubectl -n kuttl-test-awake-shepherd get secret minio-secret -o 'jsonpath={.data.AWS_ACCESS_KEY_ID}' logger.go:42: 09:22:43 | demand-backup/26-delete-all-backups | ++ base64 -d logger.go:42: 09:22:44 | demand-backup/26-delete-all-backups | + accessKey='some-access$\n"-key' logger.go:42: 09:22:44 | demand-backup/26-delete-all-backups | ++ kubectl -n kuttl-test-awake-shepherd get secret minio-secret -o 'jsonpath={.data.AWS_SECRET_ACCESS_KEY}' logger.go:42: 09:22:44 | demand-backup/26-delete-all-backups | ++ base64 -d logger.go:42: 09:22:44 | demand-backup/26-delete-all-backups | + secretKey='some-$\n"secret-key' 
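Note: the decoded keys deliberately contain shell metacharacters ($, \n, a double quote) to shake out quoting bugs, and the script avoids re-parsing them by passing them to /usr/bin/env as plain argv entries instead of embedding them in another shell string. The listing itself has to run from a throwaway in-cluster pod, because minio-service:9000 only resolves inside the cluster; this is the command the next trace lines show, re-lined for readability:

    # One-shot pod; --rm removes it once the listing returns.
    kubectl run -n "${NAMESPACE}" -i --rm aws-cli --image=perconalab/awscli \
        --restart=Never -- \
        /usr/bin/env AWS_ACCESS_KEY_ID="${accessKey}" \
                     AWS_SECRET_ACCESS_KEY="${secretKey}" \
                     AWS_DEFAULT_REGION=us-east-1 \
        /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls operator-testing/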
logger.go:42: 09:22:44 | demand-backup/26-delete-all-backups | ++ kubectl run -n kuttl-test-awake-shepherd -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env 'AWS_ACCESS_KEY_ID=some-access$\n"-key' 'AWS_SECRET_ACCESS_KEY=some-$\n"secret-key' AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls operator-testing/ logger.go:42: 09:22:44 | demand-backup/26-delete-all-backups | ++ grep -c demand-backup-minio/ logger.go:42: 09:22:44 | demand-backup/26-delete-all-backups | ++ cat logger.go:42: 09:22:49 | demand-backup/26-delete-all-backups | ++ exit 0 logger.go:42: 09:22:49 | demand-backup/26-delete-all-backups | + backup_exists=0 logger.go:42: 09:22:49 | demand-backup/26-delete-all-backups | + [[ 1 -eq 0 ]] logger.go:42: 09:22:49 | demand-backup/26-delete-all-backups | test step completed 26-delete-all-backups logger.go:42: 09:22:49 | demand-backup/99-drop-finalizer | starting test step 99-drop-finalizer logger.go:42: 09:22:51 | demand-backup/99-drop-finalizer | PerconaServerMySQL:kuttl-test-awake-shepherd/demand-backup updated logger.go:42: 09:22:51 | demand-backup/99-drop-finalizer | test step completed 99-drop-finalizer logger.go:42: 09:22:51 | demand-backup | demand-backup events from ns kuttl-test-awake-shepherd: logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:45:43 +0000 UTC Normal Pod percona-server-mysql-operator-b4c599bbb-cn75v Scheduled Successfully assigned kuttl-test-awake-shepherd/percona-server-mysql-operator-b4c599bbb-cn75v to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:45:43 +0000 UTC Normal ReplicaSet.apps percona-server-mysql-operator-b4c599bbb SuccessfulCreate Created pod: percona-server-mysql-operator-b4c599bbb-cn75v replicaset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:45:43 +0000 UTC Normal Deployment.apps percona-server-mysql-operator ScalingReplicaSet Scaled up replica set percona-server-mysql-operator-b4c599bbb to 1 deployment-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:45:46 +0000 UTC Normal Pod percona-server-mysql-operator-b4c599bbb-cn75v.spec.containers{manager} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:45:47 +0000 UTC Normal Pod mysql-client Scheduled Successfully assigned kuttl-test-awake-shepherd/mysql-client to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:45:47 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Pulling Pulling image "percona/percona-server:8.0.33" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:45:58 +0000 UTC Normal ReplicaSet.apps minio-service-85cdcd4d44 SuccessfulCreate Created pod: minio-service-85cdcd4d44-jfmbv replicaset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:45:58 +0000 UTC Normal PersistentVolumeClaim minio-service WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:45:58 +0000 UTC Normal Deployment.apps minio-service ScalingReplicaSet Scaled up replica set minio-service-85cdcd4d44 to 1 deployment-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:45:58 +0000 UTC Normal PersistentVolumeClaim minio-service ExternalProvisioning waiting for a volume to be created, either by external provisioner 
"pd.csi.storage.gke.io" or manually created by system administrator persistentvolume-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:45:58 +0000 UTC Normal PersistentVolumeClaim minio-service Provisioning External provisioner is provisioning volume for claim "kuttl-test-awake-shepherd/minio-service" pd.csi.storage.gke.io_gke-ea81839c2da84c0ba676-5d8a-e595-vm_7d0d6e63-6cc4-486b-9afc-fdc7f59382fc logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:45:59 +0000 UTC Normal Pod minio-service-post-job-zwrnz Scheduled Successfully assigned kuttl-test-awake-shepherd/minio-service-post-job-zwrnz to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:45:59 +0000 UTC Normal Job.batch minio-service-post-job SuccessfulCreate Created pod: minio-service-post-job-zwrnz job-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:00 +0000 UTC Normal Pod minio-service-post-job-zwrnz.spec.containers{minio-make-user} Pulling Pulling image "quay.io/minio/mc:RELEASE.2023-09-29T16-41-22Z" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:02 +0000 UTC Normal Lease.coordination.k8s.io 08db2feb.percona.com LeaderElection percona-server-mysql-operator-b4c599bbb-cn75v_2ea7e691-b4e2-4dea-b2a7-a9565c19e81d became leader percona-server-mysql-operator-b4c599bbb-cn75v_2ea7e691-b4e2-4dea-b2a7-a9565c19e81d logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:02 +0000 UTC Normal Pod minio-service-85cdcd4d44-jfmbv Scheduled Successfully assigned kuttl-test-awake-shepherd/minio-service-85cdcd4d44-jfmbv to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:02 +0000 UTC Normal PersistentVolumeClaim minio-service ProvisioningSucceeded Successfully provisioned volume pvc-40e19cbc-dc3a-404c-a38b-c5f32f32da2d pd.csi.storage.gke.io_gke-ea81839c2da84c0ba676-5d8a-e595-vm_7d0d6e63-6cc4-486b-9afc-fdc7f59382fc logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:02 +0000 UTC Normal Pod percona-server-mysql-operator-b4c599bbb-cn75v.spec.containers{manager} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 16.00918832s (16.009276231s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:02 +0000 UTC Normal Pod percona-server-mysql-operator-b4c599bbb-cn75v.spec.containers{manager} Created Created container manager kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:02 +0000 UTC Normal Pod percona-server-mysql-operator-b4c599bbb-cn75v.spec.containers{manager} Started Started container manager kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:10 +0000 UTC Normal Pod minio-service-85cdcd4d44-jfmbv SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-40e19cbc-dc3a-404c-a38b-c5f32f32da2d" attachdetach-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:12 +0000 UTC Normal Pod minio-service-85cdcd4d44-jfmbv.spec.containers{minio} Pulling Pulling image "quay.io/minio/minio:RELEASE.2023-09-30T07-02-29Z" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:16 +0000 UTC Normal Pod minio-service-85cdcd4d44-jfmbv.spec.containers{minio} Pulled Successfully pulled image "quay.io/minio/minio:RELEASE.2023-09-30T07-02-29Z" in 4.314295764s (4.314311263s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:16 +0000 UTC Normal Pod 
minio-service-85cdcd4d44-jfmbv.spec.containers{minio} Created Created container minio kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:16 +0000 UTC Normal Pod minio-service-85cdcd4d44-jfmbv.spec.containers{minio} Started Started container minio kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:25 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Pulled Successfully pulled image "percona/percona-server:8.0.33" in 23.471483561s (38.046531953s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:25 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Created Created container mysql-client kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:25 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Started Started container mysql-client kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:29 +0000 UTC Normal Pod minio-service-post-job-zwrnz.spec.containers{minio-make-user} Pulled Successfully pulled image "quay.io/minio/mc:RELEASE.2023-09-29T16-41-22Z" in 3.37895571s (28.555195488s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:29 +0000 UTC Normal Pod minio-service-post-job-zwrnz.spec.containers{minio-make-user} Created Created container minio-make-user kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:29 +0000 UTC Normal Pod minio-service-post-job-zwrnz.spec.containers{minio-make-user} Started Started container minio-make-user kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:39 +0000 UTC Normal Job.batch minio-service-post-job Completed Job completed job-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:41 +0000 UTC Normal Pod aws-cli Scheduled Successfully assigned kuttl-test-awake-shepherd/aws-cli to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:42 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulling Pulling image "perconalab/awscli" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:45 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulled Successfully pulled image "perconalab/awscli" in 3.188312889s (3.188320099s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:45 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Created Created container aws-cli kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:45 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Started Started container aws-cli kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:56 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:56 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-awake-shepherd/datadir-demand-backup-mysql-0" pd.csi.storage.gke.io_gke-ea81839c2da84c0ba676-5d8a-e595-vm_7d0d6e63-6cc4-486b-9afc-fdc7f59382fc logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:56 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "pd.csi.storage.gke.io" or manually created by system administrator persistentvolume-controller logger.go:42: 09:22:51 | 
demand-backup | 2024-02-06 08:46:56 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Claim datadir-demand-backup-mysql-0 Pod demand-backup-mysql-0 in StatefulSet demand-backup-mysql success statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:56 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Pod demand-backup-mysql-0 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:56 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:56 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulCreate create Pod demand-backup-orc-0 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:46:57 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:00 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 ProvisioningSucceeded Successfully provisioned volume pvc-a30f5a88-ab7c-4630-b831-61ea99dd7025 pd.csi.storage.gke.io_gke-ea81839c2da84c0ba676-5d8a-e595-vm_7d0d6e63-6cc4-486b-9afc-fdc7f59382fc logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:00 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:08 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a30f5a88-ab7c-4630-b831-61ea99dd7025" attachdetach-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:09 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:09 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 105.64728ms (105.671075ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:09 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:09 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:11 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:11 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 13.811321522s (13.811335566s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:11 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 09:22:51 | 
demand-backup | 2024-02-06 08:47:11 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:17 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:19 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 1.419990896s (1.420008273s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:19 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:19 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:19 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:19 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 96.830064ms (96.843424ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:19 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:19 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 20.357740513s (20.357754889s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 7.830566199s (7.830579247s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:50 
+0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 11.108735788s (11.108747534s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:50 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:51 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:52 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:52 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:47:52 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulCreate create Pod demand-backup-orc-1 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:04 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:04 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 Provisioning External provisioner is provisioning volume for claim "kuttl-test-awake-shepherd/datadir-demand-backup-mysql-1" pd.csi.storage.gke.io_gke-ea81839c2da84c0ba676-5d8a-e595-vm_7d0d6e63-6cc4-486b-9afc-fdc7f59382fc logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:04 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 ExternalProvisioning waiting for a volume to be created, either by external provisioner "pd.csi.storage.gke.io" or manually created by system administrator persistentvolume-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:04 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Claim datadir-demand-backup-mysql-1 Pod demand-backup-mysql-1 in StatefulSet demand-backup-mysql success statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:04 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Pod demand-backup-mysql-1 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:05 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:05 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:05 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 84.747886ms (84.756096ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:05 +0000 UTC Normal Pod 
demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:05 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:05 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-0 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:07 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:08 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 ProvisioningSucceeded Successfully provisioned volume pvc-bc5110f5-8b6e-4c18-ba6f-fdd045165796 pd.csi.storage.gke.io_gke-ea81839c2da84c0ba676-5d8a-e595-vm_7d0d6e63-6cc4-486b-9afc-fdc7f59382fc logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:08 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:08 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 15.183674979s (15.183684516s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:08 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:08 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 3.170773579s (3.170781352s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:11 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 118.796035ms (118.811037ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:11 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:11 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:11 +0000 UTC Normal Pod demand-backup-haproxy-1 
Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:11 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-1 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:12 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:12 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 92.924872ms (92.940189ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:12 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:12 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:14 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:14 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:16 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-bc5110f5-8b6e-4c18-ba6f-fdd045165796" attachdetach-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:16 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 1.446474372s (1.446488295s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:16 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:16 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:16 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:16 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 96.853217ms (96.869376ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:16 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:16 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:17 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully 
pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 3.138890048s (3.138910037s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:17 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:17 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:17 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:18 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 114.537993ms (114.55131ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:18 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:18 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:18 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:18 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-2 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 85.482918ms (85.537218ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 90.065996ms (90.081396ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started 
Started container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:20 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:23 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 22.855711921s (22.855780016s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:47 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:47 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulCreate create Pod demand-backup-orc-2 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:48 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 4.702576626s (24.541732627s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:48 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:48 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:48 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:48 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:48 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 91.616393ms (91.631583ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:48 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:48 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:49 +0000 UTC Normal Pod 
demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:51 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 1.375646556s (1.375662299s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:51 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:51 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:51 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:51 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 93.306636ms (93.321801ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:51 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:48:51 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:00 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 106.298673ms (11.886440893s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:00 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:00 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:00 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 11.895585493s (16.494889167s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:00 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:00 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:00 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:10 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 9.913395282s (9.913424817s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:10 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet 
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:10 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:21 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:21 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 89.404205ms (89.424898ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:26 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 105.77943ms (105.823015ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:58 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:58 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 ExternalProvisioning waiting for a volume to be created, either by external provisioner "pd.csi.storage.gke.io" or manually created by system administrator persistentvolume-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:58 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 Provisioning External provisioner is provisioning volume for claim "kuttl-test-awake-shepherd/datadir-demand-backup-mysql-2" pd.csi.storage.gke.io_gke-ea81839c2da84c0ba676-5d8a-e595-vm_7d0d6e63-6cc4-486b-9afc-fdc7f59382fc logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:58 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Claim datadir-demand-backup-mysql-2 Pod demand-backup-mysql-2 in StatefulSet demand-backup-mysql success statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:49:58 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Pod demand-backup-mysql-2 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:02 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 ProvisioningSucceeded Successfully provisioned volume pvc-a067a046-d241-4770-b324-3569c879f8a3 pd.csi.storage.gke.io_gke-ea81839c2da84c0ba676-5d8a-e595-vm_7d0d6e63-6cc4-486b-9afc-fdc7f59382fc logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:02 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:09 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a067a046-d241-4770-b324-3569c879f8a3" attachdetach-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:11 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 
09:22:51 | demand-backup | 2024-02-06 08:50:11 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 131.615302ms (131.632196ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:11 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:11 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:13 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:34 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 21.668849166s (21.668863315s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:34 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:34 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:34 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 10.740654733s (10.740724822s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:55 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 10.221643173s (10.221656017s including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:55 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:50:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:51:01 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:51:02 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 
09:22:51 | demand-backup | 2024-02-06 08:51:08 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 96.524648ms (96.542508ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:51:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:51:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:51:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:51:54 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2024/02/06 08:51:54 readiness check failed: connect to db: ping DB: dial tcp 10.204.161.8:33062: connect: connection refused kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:51:59 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2024/02/06 08:51:59 readiness check failed: connect to db: ping DB: dial tcp 10.204.161.8:33062: connect: connection refused kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:04 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:13 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:15 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:15 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 89.876979ms (89.902506ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:15 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:15 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:17 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:17 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 80.750674ms (80.764029ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:17 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:17 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} 
Started Started container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:17 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:17 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 88.724156ms (88.756877ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:17 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:17 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:17 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:17 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 83.216778ms (83.23462ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:17 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:17 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:36 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Startup probe failed: kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:36 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:52:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 77.625792ms (77.659165ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:53:57 +0000 UTC Normal Pod xb-demand-backup-minio-minio-hzfzk Scheduled Successfully assigned kuttl-test-awake-shepherd/xb-demand-backup-minio-minio-hzfzk to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:53:57 +0000 UTC Normal Job.batch xb-demand-backup-minio-minio SuccessfulCreate Created pod: xb-demand-backup-minio-minio-hzfzk job-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:53:58 +0000 UTC Normal Pod xb-demand-backup-minio-minio-hzfzk.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:53:58 +0000 UTC Normal Pod xb-demand-backup-minio-minio-hzfzk.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 93.244691ms (93.262636ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:53:58 +0000 UTC Normal Pod 
xb-demand-backup-minio-minio-hzfzk.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:53:58 +0000 UTC Normal Pod xb-demand-backup-minio-minio-hzfzk.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:00 +0000 UTC Normal Pod xb-demand-backup-minio-minio-hzfzk.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:00 +0000 UTC Normal Pod xb-demand-backup-minio-minio-hzfzk.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 93.838853ms (93.846763ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:00 +0000 UTC Normal Pod xb-demand-backup-minio-minio-hzfzk.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:00 +0000 UTC Normal Pod xb-demand-backup-minio-minio-hzfzk.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:07 +0000 UTC Normal Job.batch xb-demand-backup-minio-minio Completed Job completed job-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:51 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:51 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:51 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:51 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulDelete delete Pod demand-backup-mysql-2 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:51 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:51 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:51 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulDelete delete Pod demand-backup-orc-2 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:52 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:52 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:52 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulDelete delete Pod demand-backup-haproxy-2 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:53 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:53 +0000 UTC 
Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:53 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:53 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:53 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulDelete delete Pod demand-backup-haproxy-1 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:54:53 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulDelete delete Pod demand-backup-haproxy-0 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:55:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:55:12 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulDelete delete Pod demand-backup-mysql-1 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:55:16 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:55:16 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:55:16 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:55:16 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulDelete delete Pod demand-backup-mysql-0 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:55:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:55:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:55:23 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulDelete delete Pod demand-backup-orc-1 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:55:54 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:55:54 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:55:54 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulDelete delete Pod demand-backup-orc-0 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:25 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-c9x8v Scheduled Successfully assigned kuttl-test-awake-shepherd/xb-restore-demand-backup-restore-minio-c9x8v to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm 
default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:25 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio SuccessfulCreate Created pod: xb-restore-demand-backup-restore-minio-c9x8v job-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:33 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-c9x8v SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a30f5a88-ab7c-4630-b831-61ea99dd7025" attachdetach-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:34 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-c9x8v.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:35 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-c9x8v.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 101.368795ms (101.385442ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:35 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-c9x8v.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:35 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-c9x8v.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:37 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-c9x8v.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:37 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-c9x8v.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 75.008722ms (75.01554ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:37 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-c9x8v.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:37 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-c9x8v.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:46 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio Completed Job completed job-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:49 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:49 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:50 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:50 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 
120.226786ms (120.246606ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:50 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:50 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:51 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:51 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 96.813809ms (96.827751ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:51 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:51 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:52 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:52 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 88.565599ms (88.580272ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:52 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:52 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:52 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:52 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 143.887371ms (143.908131ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:52 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:52 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 82.532013ms (82.546351ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} 
Created Created container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 97.810111ms (97.824145ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 91.44935ms (91.457138ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:56:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:25 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:25 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:26 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:26 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 103.317504ms (103.344067ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:26 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:26 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:27 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:27 +0000 
UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 88.271082ms (88.288174ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:27 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:27 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:27 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:27 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 95.63678ms (95.65037ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:27 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:27 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:32 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:33 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:33 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 92.574669ms (92.589103ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:33 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:33 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:33 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-bc5110f5-8b6e-4c18-ba6f-fdd045165796" attachdetach-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:35 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:35 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 99.305618ms (99.313313ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:35 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:35 +0000 UTC Normal Pod 
demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:35 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:35 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 92.041959ms (92.060071ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:35 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:35 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:35 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:35 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 108.907207ms (108.928109ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:35 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:35 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:36 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:37 +0000 UTC Warning Pod demand-backup-haproxy-1 FailedMount MountVolume.SetUp failed for volume "config" : failed to sync configmap cache: timed out waiting for the condition kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 88.32257ms (88.342195ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image 
"perconalab/percona-server-mysql-operator:main-backup" in 119.905078ms (119.911707ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 94.265997ms (94.28275ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:38 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:38 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 117.566101ms (117.581728ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:38 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:38 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:40 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:40 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 108.396259ms (108.405237ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:40 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:40 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:40 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:40 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 115.104678ms (115.119363ms including waiting) 
kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:40 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:40 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:41 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:42 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:42 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 81.274347ms (81.281472ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:42 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:42 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:44 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:44 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 81.7328ms (81.762596ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:44 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:44 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:44 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:44 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 102.464992ms (102.472738ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:44 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:44 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:56 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup 
probe, will be restarted kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:57:59 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 122.099353ms (122.114065ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:00 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:01 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:01 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 103.236679ms (103.244703ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:01 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:01 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:03 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:03 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 99.555014ms (99.570183ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:03 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:03 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:03 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:03 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 93.836174ms (93.852533ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:03 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:03 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:30 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:37 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a067a046-d241-4770-b324-3569c879f8a3" 
attachdetach-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:41 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:41 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 115.005923ms (115.021679ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:41 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:41 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:43 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:43 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 89.915085ms (89.930021ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:43 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:43 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:43 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:43 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 95.751985ms (95.761269ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:43 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:44 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:44 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:44 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 90.70372ms (90.71859ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:44 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:58:44 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:59:02 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy 
Startup probe failed: kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:59:02 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 08:59:05 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 101.587208ms (101.604469ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:38 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:38 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:38 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:38 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:39 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:39 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:40 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:40 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:42 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:42 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:42 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:46 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:47 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:47 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:00:51 
+0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:01:08 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:01:08 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:01:39 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:01:39 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:14 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-rrqfp Scheduled Successfully assigned kuttl-test-awake-shepherd/xb-restore-demand-backup-restore-minio-backup-source-rrqfp to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:14 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio-backup-source SuccessfulCreate Created pod: xb-restore-demand-backup-restore-minio-backup-source-rrqfp job-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:21 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-rrqfp SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a30f5a88-ab7c-4630-b831-61ea99dd7025" attachdetach-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:23 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-rrqfp.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:23 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-rrqfp.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 117.704851ms (117.719935ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:23 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-rrqfp.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:23 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-rrqfp.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:24 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-rrqfp.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:25 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-rrqfp.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 78.266764ms (78.281279ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:25 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-rrqfp.spec.containers{xtrabackup} Created Created container 
xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:25 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-rrqfp.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:34 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio-backup-source Completed Job completed job-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:36 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:36 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:37 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:37 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 107.689637ms (107.705374ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:37 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:37 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 88.008669ms (88.027687ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:39 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:39 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 86.259194ms (86.275538ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:39 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:39 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:39 +0000 UTC Normal Pod 
demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:39 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 97.215982ms (97.229501ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:39 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:39 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 93.388322ms (93.400822ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 84.792588ms (84.807009ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 88.924887ms (88.932479ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:02:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:12 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler 
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:12 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:13 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:13 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 554.433242ms (554.450826ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:13 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:13 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:15 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 81.675499ms (81.693319ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 92.060516ms (92.079616ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:16 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:16 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 124.044401ms (124.051149ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:16 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:16 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:18 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:18 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 84.313007ms (84.329055ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:18 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:18 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:18 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:18 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 95.557807ms (95.566502ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:18 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:18 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:19 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:19 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:20 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 105.828875ms (105.841582ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:20 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:20 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:21 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-bc5110f5-8b6e-4c18-ba6f-fdd045165796" attachdetach-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 98.188078ms (98.202885ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 85.344962ms (85.380591ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:23 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 120.052565ms (120.062374ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:24 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:24 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 103.679893ms (103.851178ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:24 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:24 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:24 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:24 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 88.670852ms (88.678744ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:24 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:24 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:24 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 127.696831ms (164.417023ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 117.680529ms (117.688146ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:26 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:26 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 90.642899ms (90.651601ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:26 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:26 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:26 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:26 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 90.411658ms (90.419302ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:26 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:43 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 92.306791ms (92.341562ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:47 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:48 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:48 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 111.733909ms (111.742691ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:48 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:48 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 90.583061ms (90.598863ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 85.288523ms (85.304286ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:03:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:17 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:44 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a067a046-d241-4770-b324-3569c879f8a3" attachdetach-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 143.991764ms (144.009563ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:48 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:48 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 92.324976ms (92.360818ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:48 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:48 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:48 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:48 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 94.426706ms (94.433442ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:48 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:48 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:48 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:48 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 84.862173ms (84.876172ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:48 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:04:49 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:05:06 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:05:06 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:05:10 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 98.602967ms (98.6157ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:05:56 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-bv5tk Scheduled Successfully assigned kuttl-test-awake-shepherd/xb-demand-backup-s3-aws-s3-bv5tk to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:05:56 +0000 UTC Normal Job.batch xb-demand-backup-s3-aws-s3 SuccessfulCreate Created pod: xb-demand-backup-s3-aws-s3-bv5tk job-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:05:57 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-bv5tk.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:05:57 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-bv5tk.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 108.523478ms (108.542031ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:05:57 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-bv5tk.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:05:57 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-bv5tk.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:05:58 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-bv5tk.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:05:58 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-bv5tk.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 101.318458ms (101.325751ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:05:58 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-bv5tk.spec.containers{xtrabackup} Created Created container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:05:58 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-bv5tk.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:05 +0000 UTC Normal Job.batch xb-demand-backup-s3-aws-s3 Completed Job completed job-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:22 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:22 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:22 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:22 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:23 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:23 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:30 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:30 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:30 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:33 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2024/02/06 09:06:33 readiness check failed: connect to db: ping DB: dial tcp 10.204.161.18:33062: connect: connection refused kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:38 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:52 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:06:52 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:07:23 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:07:23 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:07:55 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-w6sk8 Scheduled Successfully assigned kuttl-test-awake-shepherd/xb-restore-demand-backup-restore-s3-w6sk8 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:07:55 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-s3 SuccessfulCreate Created pod: xb-restore-demand-backup-restore-s3-w6sk8 job-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:07:59 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-w6sk8 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a30f5a88-ab7c-4630-b831-61ea99dd7025" attachdetach-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:06 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-w6sk8.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:06 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-w6sk8.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 107.358952ms (107.372115ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:06 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-w6sk8.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:06 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-w6sk8.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:07 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-w6sk8.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:07 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-w6sk8.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 81.158678ms (81.165901ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:07 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-w6sk8.spec.containers{xtrabackup} Created Created container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:08 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-w6sk8.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:21 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-s3 Completed Job completed job-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:23 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:23 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:24 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:24 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 119.301877ms (119.309941ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:24 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:24 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:25 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 91.04915ms (91.063439ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:26 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 277.794522ms (277.808323ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:26 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:26 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:26 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:26 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 84.126621ms (84.147463ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:26 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:26 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:27 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 75.272683ms (75.284687ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 89.74403ms (89.76014ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 88.501758ms (88.509252ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:08:59 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:00 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:00 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 137.549336ms (137.561122ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:00 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:00 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:01 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:01 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:01 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 83.752704ms (83.767849ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:01 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:01 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:01 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:01 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 92.080759ms (92.127437ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:01 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:01 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:03 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:04 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:04 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 94.75954ms (94.767776ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:04 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:04 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:06 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:06 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 98.816316ms (98.840832ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:06 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:06 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:06 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:06 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 115.506933ms (115.51989ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:06 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:06 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:07 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:07 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:08 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 89.899752ms (89.924637ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:08 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:08 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:08 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-bc5110f5-8b6e-4c18-ba6f-fdd045165796" attachdetach-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:10 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:10 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 106.231348ms (106.246473ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:10 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:10 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:10 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:10 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 107.582694ms (107.602296ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:10 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:10 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:10 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:10 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 129.581844ms (129.589712ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:10 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:10 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:11 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:12 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:12 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 94.052007ms (94.066281ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:12 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:12 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 96.635905ms (96.653975ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 112.927721ms (112.936136ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 100.767656ms (100.787301ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:14 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:14 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 92.992834ms (93.014458ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:14 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:14 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:14 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:14 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 103.183733ms (103.199047ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:14 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:31 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:31 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 100.985775ms (101.313383ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:34 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:36 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:36 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 110.651074ms (110.664694ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:36 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:36 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:38 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:38 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 77.512765ms (77.524029ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:38 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:38 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:38 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:38 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 105.192919ms (105.208571ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:38 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:09:38 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:05 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:12 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a067a046-d241-4770-b324-3569c879f8a3" attachdetach-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:14 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:14 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 111.850889ms (111.895841ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:14 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:14 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 104.661516ms (104.681514ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 96.019042ms (96.036377ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 92.783986ms (92.791714ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:35 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:35 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:10:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 91.584302ms (91.604082ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:23 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-bwhgt Scheduled Successfully assigned kuttl-test-awake-shepherd/xb-demand-backup-gcp-gcp-cs-bwhgt to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:23 +0000 UTC Normal Job.batch xb-demand-backup-gcp-gcp-cs SuccessfulCreate Created pod: xb-demand-backup-gcp-gcp-cs-bwhgt job-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:24 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-bwhgt.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:24 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-bwhgt.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 143.258028ms (143.272335ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:24 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-bwhgt.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:24 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-bwhgt.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:26 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-bwhgt.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:26 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-bwhgt.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 108.846703ms (108.871487ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:26 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-bwhgt.spec.containers{xtrabackup} Created Created container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:26 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-bwhgt.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:33 +0000 UTC Normal Job.batch xb-demand-backup-gcp-gcp-cs Completed Job completed job-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:58 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:58 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:58 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:58 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:58 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:59 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:11:59 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:12:00 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:12:00 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:12:00 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:12:00 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:12:01 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:12:02 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:12:02 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:12:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:12:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:12:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:12:11 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:12:29 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:12:29 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:13:00 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:13:00 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:13:33 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-wrpvb Scheduled Successfully assigned kuttl-test-awake-shepherd/xb-restore-demand-backup-restore-gcp-wrpvb to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:13:33 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-gcp SuccessfulCreate Created pod: xb-restore-demand-backup-restore-gcp-wrpvb job-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:13:40 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-wrpvb SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a30f5a88-ab7c-4630-b831-61ea99dd7025" attachdetach-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:13:43 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-wrpvb.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:13:43 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-wrpvb.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 117.46852ms (117.488024ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:13:43 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-wrpvb.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:13:43 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-wrpvb.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:13:44 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-wrpvb.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:13:44 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-wrpvb.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 102.589492ms (102.603777ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:13:45 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-wrpvb.spec.containers{xtrabackup} Created Created container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:13:45 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-wrpvb.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:13:57 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-gcp Completed Job completed job-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:13:59 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:00 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:00 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:00 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 105.638738ms (105.653569ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:00 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:01 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:02 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:02 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 91.864427ms (91.886109ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 98.943382ms (98.957953ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 80.108565ms (80.115473ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:05 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 76.880028ms (76.89677ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 80.238405ms (80.246002ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 112.124444ms (112.136047ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:35 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:36 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:36 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 121.077869ms (121.088134ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:36 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:36 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:38 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:38 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 102.746803ms (102.756371ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:38 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:38 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:38 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:38 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 113.818933ms (113.83486ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:38 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:38 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:39 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:39 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:40 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:40 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 111.608483ms (111.62511ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:40 +0000 UTC
Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:40 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:42 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:42 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 84.972541ms (84.991916ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:42 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:42 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:42 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:42 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 85.230836ms (85.245012ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:42 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:42 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:43 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:44 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:44 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 107.033606ms (107.048939ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:44 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:44 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:45 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:45 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image 
"perconalab/percona-server-mysql-operator:main-haproxy" in 80.921644ms (80.936135ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:45 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:45 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:45 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:45 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 91.347783ms (91.365582ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:45 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:45 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:46 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:47 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:47 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 108.367277ms (108.389766ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:47 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:47 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:49 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:49 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 88.027452ms (88.040353ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:49 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:49 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:49 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 09:22:51 | 
demand-backup | 2024-02-06 09:14:49 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 80.813963ms (80.821461ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:49 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:49 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:49 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-bc5110f5-8b6e-4c18-ba6f-fdd045165796" attachdetach-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:50 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:50 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 98.607441ms (98.623061ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:50 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:51 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 101.663433ms (101.675349ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 106.377272ms (106.3853ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image 
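The events above also document the container layout the operator uses for each mysql pod: a mysql-init init container runs to completion first, then the mysql server, the xtrabackup sidecar, and the pt-heartbeat sidecar are started in order. When a bring-up step like this needs debugging, per-container logs can be pulled directly; a minimal sketch, assuming kubectl access to the same cluster and the kuttl-generated namespace (container names come from the events themselves):

    # assumes the namespace from this run; substitute your own
    NAMESPACE=kuttl-test-awake-shepherd
    # init container output (volume/config preparation)
    kubectl -n "$NAMESPACE" logs demand-backup-mysql-1 -c mysql-init
    # server, backup sidecar, and replication-heartbeat sidecar
    kubectl -n "$NAMESPACE" logs demand-backup-mysql-1 -c mysql
    kubectl -n "$NAMESPACE" logs demand-backup-mysql-1 -c xtrabackup
    kubectl -n "$NAMESPACE" logs demand-backup-mysql-1 -c pt-heartbeat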
"perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 91.446678ms (91.455939ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:11 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:11 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:11 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 113.513357ms (113.521979ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:13 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:13 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 94.405846ms (94.420843ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:13 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:13 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:13 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:13 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 92.067652ms (92.083583ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:13 +0000 UTC 
Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:13 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:15 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 104.127774ms (104.141277ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:45 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:53 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a067a046-d241-4770-b324-3569c879f8a3" attachdetach-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:54 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:54 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 112.908073ms (112.923159ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:54 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:54 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:55 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:55 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 77.843863ms (77.866858ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:55 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:55 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:55 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 114.052848ms (114.061294ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started 
container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 100.575159ms (100.621666ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:15:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:16:15 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:16:15 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:16:18 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 89.257657ms (89.28347ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:04 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5 Scheduled Successfully assigned kuttl-test-awake-shepherd/xb-demand-backup-azure-azure-blob-w5dc5 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:04 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:04 +0000 UTC Normal Job.batch xb-demand-backup-azure-azure-blob SuccessfulCreate Created pod: xb-demand-backup-azure-azure-blob-w5dc5 job-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:05 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 88.76368ms (88.778759ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:05 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:05 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:06 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:06 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 89.607271ms (89.620002ms including waiting) kubelet logger.go:42: 09:22:51 | 
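The Unhealthy/Killing pairs above (demand-backup-mysql-1 at 09:15:11, demand-backup-mysql-2 at 09:16:15) are startup-probe restarts while the new replicas come up; each pod re-pulls the mysql image a few seconds later and the run still passes, so they are transient here. To review such warnings in context, the namespace events can be filtered and time-ordered; a minimal sketch, assuming access to the same cluster:

    NAMESPACE=kuttl-test-awake-shepherd
    # only Warning events, oldest first
    kubectl -n "$NAMESPACE" get events --field-selector type=Warning --sort-by=.lastTimestamp
    # full event history and probe configuration for one pod
    kubectl -n "$NAMESPACE" describe pod demand-backup-mysql-2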
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:04 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5 Scheduled Successfully assigned kuttl-test-awake-shepherd/xb-demand-backup-azure-azure-blob-w5dc5 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:04 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:04 +0000 UTC Normal Job.batch xb-demand-backup-azure-azure-blob SuccessfulCreate Created pod: xb-demand-backup-azure-azure-blob-w5dc5 job-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:05 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 88.76368ms (88.778759ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:05 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:05 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:06 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:06 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 89.607271ms (89.620002ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:06 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5.spec.containers{xtrabackup} Created Created container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:06 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-w5dc5.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:13 +0000 UTC Normal Job.batch xb-demand-backup-azure-azure-blob Completed Job completed job-controller
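The on-demand backup to Azure Blob ran as a batch Job: the job-controller created xb-demand-backup-azure-azure-blob-w5dc5 at 09:17:04 and marked the Job complete at 09:17:13, about nine seconds later. Outside the harness, the same completion can be awaited and the backup log inspected; a minimal sketch, assuming the namespace and Job name reported above:

    NAMESPACE=kuttl-test-awake-shepherd
    # block until the backup Job reports the Complete condition
    kubectl -n "$NAMESPACE" wait --for=condition=complete --timeout=300s \
        job/xb-demand-backup-azure-azure-blob
    # xtrabackup output from the Job's pod
    kubectl -n "$NAMESPACE" logs job/xb-demand-backup-azure-azure-blob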
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:32 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:32 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:32 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:32 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:32 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:32 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:32 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:33 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:33 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:34 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:34 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:36 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:36 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:36 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:40 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:40 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:40 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:43 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2024/02/06 09:17:43 readiness check failed: connect to db: ping DB: dial tcp 10.204.161.28:33062: connect: connection refused kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:17:48 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:18:03 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:18:03 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:18:34 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:18:34 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:09 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-2kct5 Scheduled Successfully assigned kuttl-test-awake-shepherd/xb-restore-demand-backup-restore-azure-2kct5 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:09 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-azure SuccessfulCreate Created pod: xb-restore-demand-backup-restore-azure-2kct5 job-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:17 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-2kct5 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a30f5a88-ab7c-4630-b831-61ea99dd7025" attachdetach-controller
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:21 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-2kct5.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:21 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-2kct5.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 100.66026ms (100.669244ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:21 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-2kct5.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:21 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-2kct5.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:22 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-2kct5.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:22 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-2kct5.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 72.716871ms (72.723596ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:23 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-2kct5.spec.containers{xtrabackup} Created Created container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:23 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-2kct5.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:35 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-azure Completed Job completed job-controller
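The restore follows the same Job pattern, but note the ordering in the events: the operator first stops every cluster container (the Killing events from 09:17:32 onward, including the expected readiness-probe warnings on the shutting-down mysql-0), then runs xb-restore-demand-backup-restore-azure against the data volume, and only reschedules the cluster pods after the Job completes at 09:19:35. A minimal sketch for watching that sequence outside the harness, assuming the same namespace:

    NAMESPACE=kuttl-test-awake-shepherd
    # block until the restore Job finishes
    kubectl -n "$NAMESPACE" wait --for=condition=complete --timeout=600s \
        job/xb-restore-demand-backup-restore-azure
    # then watch the cluster pods come back
    kubectl -n "$NAMESPACE" get pods -w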
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:38 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:39 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:39 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 121.341983ms (121.359895ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 80.768412ms (80.779001ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:41 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:42 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 112.512469ms (112.527728ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:42 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:42 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:42 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:42 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 87.566357ms (87.582339ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:42 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:42 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:43 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 122.547243ms (122.561098ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:43 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:43 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:43 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:43 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 96.373746ms (96.381367ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:43 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:43 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:43 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:43 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 106.308676ms (106.315133ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:43 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:19:43 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:14 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 110.433509ms (110.454422ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:16 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:17 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-0 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 87.432272ms (87.440427ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 95.391406ms (95.398585ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:18 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:18 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 76.535252ms (76.551017ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:18 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:18 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 95.430394ms (95.437958ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 96.990416ms (96.998295ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:21 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-1 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 90.463863ms (90.481569ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 81.821629ms (81.835996ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 101.484617ms (101.504494ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:23 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-bc5110f5-8b6e-4c18-ba6f-fdd045165796" attachdetach-controller
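The SuccessfulAttachVolume event above reuses the same volume seen before the restore (pvc-bc5110f5-8b6e-4c18-ba6f-fdd045165796 was attached to demand-backup-mysql-1 at 09:14:49 and again at 09:20:23), i.e. the rescheduled pods reattach their existing PVCs rather than provisioning new ones. A minimal sketch for confirming this, assuming the same namespace:

    NAMESPACE=kuttl-test-awake-shepherd
    # list the claims and their bound volumes
    kubectl -n "$NAMESPACE" get pvc -o wide
    # attach history for the namespace
    kubectl -n "$NAMESPACE" get events --field-selector reason=SuccessfulAttachVolume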
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:24 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-haproxy-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-6czj default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:25 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:25 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 110.298722ms (110.446735ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:25 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:25 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 90.050946ms (150.155584ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 98.461435ms (184.088373ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 99.343314ms (102.653983ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 87.386745ms (87.501101ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 98.330986ms (98.463461ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 121.424675ms (121.431279ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:46 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:46 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:49 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 105.282842ms (105.31538ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:50 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-orc-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 100.658415ms (100.674567ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 104.012065ms (104.02416ms including waiting) kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 92.715799ms (92.728671ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:20:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:20 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-awake-shepherd/demand-backup-mysql-2 to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-ljvn default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:27 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a067a046-d241-4770-b324-3569c879f8a3" attachdetach-controller logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:29 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-523-f00253e" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:29 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-523-f00253e" in 101.960021ms (101.980065ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:29 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:29 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:31 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:31 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 88.65268ms (88.671126ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:31 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:31 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:31 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:32 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 99.502522ms (99.517216ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:32 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container 
xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:32 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:32 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:32 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 121.280734ms (121.298325ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:32 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:32 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:50 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:50 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:21:54 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 105.162336ms (105.198898ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:22:44 +0000 UTC Normal Pod aws-cli Scheduled Successfully assigned kuttl-test-awake-shepherd/aws-cli to gke-jen-ps-523-f00253e-5-default-pool-0e55f656-32bm default-scheduler logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:22:45 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulling Pulling image "perconalab/awscli" kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:22:45 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulled Successfully pulled image "perconalab/awscli" in 140.943812ms (140.951541ms including waiting) kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:22:45 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Created Created container aws-cli kubelet logger.go:42: 09:22:51 | demand-backup | 2024-02-06 09:22:45 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Started Started container aws-cli kubelet logger.go:42: 09:22:52 | demand-backup | Deleting namespace: kuttl-test-awake-shepherd === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (2296.97s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/demand-backup (2288.48s) PASS
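Per the summary, the demand-backup case accounted for 2288.48s of the 2296.97s total wall time. To iterate on just this case rather than the full 30-test suite, a single test can be selected by name; a minimal sketch, assuming the kuttl kubectl plugin is installed and the repository's e2e-tests layout used above (exact flags may vary by kuttl version):

    # hypothetical invocation; run from the repository root
    cd e2e-tests
    kubectl kuttl test ./tests --test demand-backup --timeout 180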