=== RUN kuttl
harness.go:464: starting setup
harness.go:255: running tests using configured kubeconfig.
harness.go:278: Successful connection to cluster at: https://35.225.23.18
harness.go:363: running tests
harness.go:75: going to run test suite with timeout of 180 seconds for each step
harness.go:375: testsuite: e2e-tests/tests has 34 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/demand-backup
=== PAUSE kuttl/harness/demand-backup
=== CONT kuttl/harness/demand-backup
logger.go:42: 12:05:14 | demand-backup | Creating namespace: kuttl-test-clear-eel
logger.go:42: 12:05:14 | demand-backup/0-minio-secret | starting test step 0-minio-secret
logger.go:42: 12:05:14 | demand-backup/0-minio-secret | Secret:kuttl-test-clear-eel/minio-secret created
logger.go:42: 12:05:15 | demand-backup/0-minio-secret | test step completed 0-minio-secret
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | starting test step 1-deploy-operator
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
init_temp_dir # do this only in the first TestStep
apply_s3_storage_secrets
deploy_operator
deploy_non_tls_cluster_secrets
deploy_tls_cluster_secrets
deploy_client
deploy_minio]
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | + source ../../functions
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ realpath ../../..
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | ++++ pwd
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/tests/demand-backup
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | ++ test_name=demand-backup
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/vars.sh
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | ++++ git rev-parse --abbrev-ref HEAD
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export GIT_BRANCH=PR-916
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ GIT_BRANCH=PR-916
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export VERSION=PR-916-a5bda550
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ VERSION=PR-916-a5bda550
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export PMM_SERVER_VERSION=1.4.3
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ PMM_SERVER_VERSION=1.4.3
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export CERT_MANAGER_VER=1.17.2
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ CERT_MANAGER_VER=1.17.2
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export MINIO_VER=5.4.0
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ MINIO_VER=5.4.0
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ export CHAOS_MESH_VER=2.7.2
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ CHAOS_MESH_VER=2.7.2
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | ++++ which gdate
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-916/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | ++++ which date
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ date=/usr/bin/date
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ oc get projects
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ :
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ kubectl get nodes
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | +++ grep '^minikube'
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | ++ oc get projects
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | + init_temp_dir
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | + rm -rf /tmp/kuttl/ps/demand-backup
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | + mkdir -p /tmp/kuttl/ps/demand-backup
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | + apply_s3_storage_secrets
logger.go:42: 12:05:15 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-clear-eel apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf/minio-secret.yml
logger.go:42: 12:05:16 | demand-backup/1-deploy-operator | Warning: resource secrets/minio-secret is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 12:05:16 | demand-backup/1-deploy-operator | secret/minio-secret configured
logger.go:42: 12:05:16 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-clear-eel apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf/cloud-secret.yml
logger.go:42: 12:05:17 | demand-backup/1-deploy-operator | secret/aws-s3-secret created
logger.go:42: 12:05:17 | demand-backup/1-deploy-operator | secret/gcp-cs-secret created
logger.go:42: 12:05:17 | demand-backup/1-deploy-operator | secret/azure-secret created
logger.go:42: 12:05:17 | demand-backup/1-deploy-operator | + deploy_operator
logger.go:42: 12:05:17 | demand-backup/1-deploy-operator | + destroy_operator
logger.go:42: 12:05:17 | demand-backup/1-deploy-operator | + kubectl -n ps-operator delete deployment percona-server-mysql-operator --force --grace-period=0
logger.go:42: 12:05:17 | demand-backup/1-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
logger.go:42: 12:05:18 | demand-backup/1-deploy-operator | deployment.apps "percona-server-mysql-operator" force deleted
logger.go:42: 12:05:18 | demand-backup/1-deploy-operator | + [[ -n ps-operator ]]
logger.go:42: 12:05:18 | demand-backup/1-deploy-operator | + kubectl delete namespace ps-operator --force --grace-period=0
logger.go:42: 12:05:18 | demand-backup/1-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
logger.go:42: 12:05:18 | demand-backup/1-deploy-operator | namespace "ps-operator" force deleted
logger.go:42: 12:05:24 | demand-backup/1-deploy-operator | + [[ -n ps-operator ]]
logger.go:42: 12:05:24 | demand-backup/1-deploy-operator | + create_namespace ps-operator
logger.go:42: 12:05:24 | demand-backup/1-deploy-operator | + local namespace=ps-operator
logger.go:42: 12:05:24 | demand-backup/1-deploy-operator | + [[ -n '' ]]
logger.go:42: 12:05:24 | demand-backup/1-deploy-operator | + kubectl delete namespace ps-operator --ignore-not-found
logger.go:42: 12:05:24 | demand-backup/1-deploy-operator | + kubectl wait --for=delete namespace ps-operator
logger.go:42: 12:05:25 | demand-backup/1-deploy-operator | + kubectl create namespace ps-operator
logger.go:42: 12:05:25 | demand-backup/1-deploy-operator | namespace/ps-operator created
logger.go:42: 12:05:25 | demand-backup/1-deploy-operator | + kubectl -n ps-operator apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy/crd.yaml
logger.go:42: 12:05:26 | demand-backup/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlbackups.ps.percona.com serverside-applied
logger.go:42: 12:05:27 | demand-backup/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlrestores.ps.percona.com serverside-applied
logger.go:42: 12:05:28 | demand-backup/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqls.ps.percona.com serverside-applied
logger.go:42: 12:05:28 | demand-backup/1-deploy-operator | + '[' -n ps-operator ']'
logger.go:42: 12:05:28 | demand-backup/1-deploy-operator | + kubectl -n ps-operator apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy/cw-rbac.yaml
logger.go:42: 12:05:29 | demand-backup/1-deploy-operator | serviceaccount/percona-server-mysql-operator created
logger.go:42: 12:05:29 | demand-backup/1-deploy-operator | role.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created
logger.go:42: 12:05:29 | demand-backup/1-deploy-operator | clusterrole.rbac.authorization.k8s.io/percona-server-mysql-operator unchanged
logger.go:42: 12:05:29 | demand-backup/1-deploy-operator | rolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created
logger.go:42: 12:05:30 | demand-backup/1-deploy-operator | clusterrolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator unchanged
logger.go:42: 12:05:30 | demand-backup/1-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="LOG_LEVEL").value) = "DEBUG"'
logger.go:42: 12:05:30 | demand-backup/1-deploy-operator | + kubectl -n ps-operator apply -f -
logger.go:42: 12:05:30 | demand-backup/1-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="DISABLE_TELEMETRY").value) = "true"'
logger.go:42: 12:05:30 | demand-backup/1-deploy-operator | ++ printf 'select(documentIndex==1).spec.template.spec.containers[0].image="%s"' perconalab/percona-server-mysql-operator:PR-916-a5bda550
logger.go:42: 12:05:30 | demand-backup/1-deploy-operator | + yq eval 'select(documentIndex==1).spec.template.spec.containers[0].image="perconalab/percona-server-mysql-operator:PR-916-a5bda550"' /mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy/cw-operator.yaml
logger.go:42: 12:05:31 | demand-backup/1-deploy-operator | configmap/percona-server-mysql-operator-config created
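
[editor's note] The deploy_operator patching traced above condenses to the following single pipeline (a sketch assembled from the xtrace; the order of the middle yq stages is not visible in the log, and the surrounding shell function is omitted):

    # Patch deploy/cw-operator.yaml in-stream, then apply it to the operator namespace.
    yq eval 'select(documentIndex==1).spec.template.spec.containers[0].image="perconalab/percona-server-mysql-operator:PR-916-a5bda550"' \
        /mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy/cw-operator.yaml \
      | yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="DISABLE_TELEMETRY").value) = "true"' \
      | yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="LOG_LEVEL").value) = "DEBUG"' \
      | kubectl -n ps-operator apply -f -
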
logger.go:42: 12:05:31 | demand-backup/1-deploy-operator | deployment.apps/percona-server-mysql-operator created
logger.go:42: 12:05:31 | demand-backup/1-deploy-operator | + deploy_non_tls_cluster_secrets
logger.go:42: 12:05:31 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-clear-eel apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf/secrets.yaml
logger.go:42: 12:05:32 | demand-backup/1-deploy-operator | secret/test-secrets created
logger.go:42: 12:05:32 | demand-backup/1-deploy-operator | + deploy_tls_cluster_secrets
logger.go:42: 12:05:32 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-clear-eel apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf/ssl-secret.yaml
logger.go:42: 12:05:33 | demand-backup/1-deploy-operator | secret/test-ssl created
logger.go:42: 12:05:33 | demand-backup/1-deploy-operator | + deploy_client
logger.go:42: 12:05:33 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-clear-eel apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf/client.yaml
logger.go:42: 12:05:34 | demand-backup/1-deploy-operator | pod/mysql-client created
logger.go:42: 12:05:34 | demand-backup/1-deploy-operator | + deploy_minio
logger.go:42: 12:05:34 | demand-backup/1-deploy-operator | + local access_key
logger.go:42: 12:05:34 | demand-backup/1-deploy-operator | + local secret_key
logger.go:42: 12:05:34 | demand-backup/1-deploy-operator | ++ kubectl -n kuttl-test-clear-eel get secret minio-secret -o 'jsonpath={.data.AWS_ACCESS_KEY_ID}'
logger.go:42: 12:05:34 | demand-backup/1-deploy-operator | ++ base64 -d
logger.go:42: 12:05:34 | demand-backup/1-deploy-operator | + access_key=some-access-key
logger.go:42: 12:05:34 | demand-backup/1-deploy-operator | ++ kubectl -n kuttl-test-clear-eel get secret minio-secret -o 'jsonpath={.data.AWS_SECRET_ACCESS_KEY}'
logger.go:42: 12:05:34 | demand-backup/1-deploy-operator | ++ base64 -d
logger.go:42: 12:05:34 | demand-backup/1-deploy-operator | + secret_key=some-secret-key
logger.go:42: 12:05:34 | demand-backup/1-deploy-operator | + helm uninstall -n kuttl-test-clear-eel minio-service
logger.go:42: 12:05:34 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-916/kubeconfig
logger.go:42: 12:05:34 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-916/kubeconfig
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | Error: uninstall: Release not loaded: minio-service: release: not found
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | + :
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | + helm repo remove minio
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-916/kubeconfig
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-916/kubeconfig
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | "minio" has been removed from your repositories
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | + helm repo add minio https://charts.min.io/
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-916/kubeconfig
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-916/kubeconfig
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | "minio" has been added to your repositories
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | +++ printf %q some-access-key
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | ++ printf %q some-access-key
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | +++ printf %q some-secret-key
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | ++ printf %q some-secret-key
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | + retry 10 60 helm install minio-service -n kuttl-test-clear-eel --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | + local max=10
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | + local delay=60
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | + shift 2
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | + local n=1
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | + helm install minio-service -n kuttl-test-clear-eel --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-916/kubeconfig
logger.go:42: 12:05:35 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-916/kubeconfig
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | NAME: minio-service
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | LAST DEPLOYED: Tue May 27 12:05:36 2025
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | NAMESPACE: kuttl-test-clear-eel
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | STATUS: deployed
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | REVISION: 1
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | TEST SUITE: None
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | NOTES:
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | MinIO can be accessed via port 9000 on the following DNS name from within your cluster:
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | minio-service.kuttl-test-clear-eel.cluster.local
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator |
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | To access MinIO from localhost, run the below commands:
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator |
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | 1. export POD_NAME=$(kubectl get pods --namespace kuttl-test-clear-eel -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}")
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator |
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | 2. kubectl port-forward $POD_NAME 9000 --namespace kuttl-test-clear-eel
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator |
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator |
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client:
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator |
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator |
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace kuttl-test-clear-eel minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace kuttl-test-clear-eel minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator |
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | 3. mc ls minio-service-local
logger.go:42: 12:06:04 | demand-backup/1-deploy-operator | ++ kubectl -n kuttl-test-clear-eel get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 12:06:05 | demand-backup/1-deploy-operator | + MINIO_POD=minio-service-86dfccd949-b46pp
logger.go:42: 12:06:05 | demand-backup/1-deploy-operator | + wait_pod minio-service-86dfccd949-b46pp
logger.go:42: 12:06:05 | demand-backup/1-deploy-operator | + local pod=minio-service-86dfccd949-b46pp
logger.go:42: 12:06:05 | demand-backup/1-deploy-operator | + set +o xtrace
logger.go:42: 12:06:05 | demand-backup/1-deploy-operator | minio-service-86dfccd949-b46pptrue
logger.go:42: 12:06:05 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-clear-eel run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID='\''some-access-key'\'' AWS_SECRET_ACCESS_KEY='\''some-secret-key'\'' AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'
logger.go:42: 12:06:08 | demand-backup/1-deploy-operator | make_bucket: operator-testing
logger.go:42: 12:06:09 | demand-backup/1-deploy-operator | pod "aws-cli" deleted
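
[editor's note] Unescaped, the bucket-creation one-liner above follows the throwaway-pod pattern below (a readable sketch of the same command; all values are as printed in the log):

    # Run a disposable pod with the AWS CLI, create the test bucket on the
    # in-cluster MinIO endpoint, and let --rm clean the pod up afterwards.
    kubectl -n kuttl-test-clear-eel run -i --rm aws-cli \
        --image=perconalab/awscli --restart=Never -- \
        bash -c "AWS_ACCESS_KEY_ID='some-access-key' \
                 AWS_SECRET_ACCESS_KEY='some-secret-key' \
                 AWS_DEFAULT_REGION=us-east-1 \
                 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing"
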
[controller-runtime] log.SetLogger(...) was never called; logs will not be displayed. Detected at:
> goroutine 35 [running]:
> runtime/debug.Stack()
> /nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/runtime/debug/stack.go:24 +0x5e
> sigs.k8s.io/controller-runtime/pkg/log.eventuallyFulfillRoot()
> /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/log/log.go:60 +0xcd
> sigs.k8s.io/controller-runtime/pkg/log.(*delegatingLogSink).WithName(0xc000299c00, {0x184a055, 0x14})
> /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/log/deleg.go:147 +0x3e
> github.com/go-logr/logr.Logger.WithName({{0x1acb7d8, 0xc000299c00}, 0x0}, {0x184a055?, 0xc000379f80?})
> /home/mowsiany/go/pkg/mod/github.com/go-logr/logr@v1.2.4/logr.go:336 +0x36
> sigs.k8s.io/controller-runtime/pkg/client.newClient(0x131eae8?, {0x0, 0xc00042caf0, {0x1accd90, 0xc000720040}, 0x0, {0x0, 0x0}, 0x0})
> /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/client/client.go:122 +0xf1
> sigs.k8s.io/controller-runtime/pkg/client.New(0xc00015a248?, {0x0, 0xc00042caf0, {0x1accd90, 0xc000720040}, 0x0, {0x0, 0x0}, 0x0})
> /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/client/client.go:103 +0x7d
> github.com/kudobuilder/kuttl/pkg/test/utils.NewRetryClient(0xc00015a248, {0x0, 0xc00042caf0, {0x1accd90, 0xc000720040}, 0x0, {0x0, 0x0}, 0x0})
> /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/utils/kubernetes.go:177 +0x127
> github.com/kudobuilder/kuttl/pkg/test.(*Harness).Client(0xc0004ce608, 0xe6?)
> /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/harness.go:323 +0x18e
> github.com/kudobuilder/kuttl/pkg/test.(*Step).Create(0xc00050ea90, 0xc0003a0ea0, {0xc000049b90, 0x14})
> /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/step.go:177 +0x63
> github.com/kudobuilder/kuttl/pkg/test.(*Step).Run(0xc00050ea90, 0xc0003a0ea0, {0xc000049b90, 0x14})
> /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/step.go:457 +0x24a
> github.com/kudobuilder/kuttl/pkg/test.(*Case).Run(0xc000119540, 0xc0003a0ea0, 0xc000532240)
> /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/case.go:373 +0xaeb
> github.com/kudobuilder/kuttl/pkg/test.(*Harness).RunTests.func1.1(0xc0003a0ea0)
> /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/harness.go:401 +0x12e
> testing.tRunner(0xc0003a0ea0, 0xc00039e1f8)
> /nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/testing/testing.go:1689 +0xfb
> created by testing.(*T).Run in goroutine 34
> /nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/testing/testing.go:1742 +0x390
logger.go:42: 12:06:09 | demand-backup/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 12:06:09 | demand-backup/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 12:06:09 | demand-backup/1-deploy-operator | INFO Found 1 resource(s).
logger.go:42: 12:06:09 | demand-backup/1-deploy-operator | NAME                            NAMESPACE     COL0
logger.go:42: 12:06:09 | demand-backup/1-deploy-operator | percona-server-mysql-operator   ps-operator   1
logger.go:42: 12:06:09 | demand-backup/1-deploy-operator | ASSERT PASS
logger.go:42: 12:06:09 | demand-backup/1-deploy-operator | test step completed 1-deploy-operator
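
[editor's note] The retry 10 60 helm install ... wrapper used for MinIO above expands, per the xtrace, to max=10 attempts with a 60-second delay. A minimal sketch of such a helper follows; the variable setup matches the trace, but the loop body is inferred, since only the first (successful) attempt appears in this log:

    # Minimal retry helper (sketch): retry <max> <delay> <command...>
    retry() {
        local max=$1
        local delay=$2
        shift 2
        local n=1
        until "$@"; do
            if [[ $n -ge $max ]]; then
                echo "retry: command failed after $max attempts" >&2
                return 1
            fi
            sleep "$delay"
            n=$((n + 1))
        done
    }
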
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | starting test step 2-create-cluster
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
get_cr \
| yq eval '.spec.mysql.clusterType="async"' - \
| yq eval ".spec.mysql.size=3" - \
| yq eval ".spec.proxy.haproxy.enabled=true" - \
| yq eval ".spec.proxy.haproxy.size=3" - \
| yq eval ".spec.orchestrator.enabled=true" - \
| yq eval ".spec.orchestrator.size=3" - \
| yq eval '.spec.backup.storages.minio.type="s3"' - \
| yq eval '.spec.backup.storages.minio.s3.bucket="operator-testing"' - \
| yq eval '.spec.backup.storages.minio.s3.credentialsSecret="minio-secret"' - \
| yq eval ".spec.backup.storages.minio.s3.endpointUrl=\"http://minio-service.${NAMESPACE}:9000\"" - \
| yq eval '.spec.backup.storages.minio.s3.region="us-east-1"' - \
| yq eval '.spec.backup.storages.aws-s3.type="s3"' - \
| yq eval ".spec.backup.storages.aws-s3.verifyTLS=true" - \
| yq eval '.spec.backup.storages.aws-s3.s3.bucket="operator-testing"' - \
| yq eval '.spec.backup.storages.aws-s3.s3.credentialsSecret="aws-s3-secret"' - \
| yq eval '.spec.backup.storages.aws-s3.s3.region="us-east-1"' - \
| yq eval '.spec.backup.storages.aws-s3.s3.prefix="ps"' - \
| yq eval '.spec.backup.storages.gcp-cs.type="gcs"' - \
| yq eval ".spec.backup.storages.gcp-cs.verifyTLS=true" - \
| yq eval '.spec.backup.storages.gcp-cs.gcs.bucket="operator-testing"' - \
| yq eval '.spec.backup.storages.gcp-cs.gcs.credentialsSecret="gcp-cs-secret"' - \
| yq eval '.spec.backup.storages.gcp-cs.gcs.endpointUrl="https://storage.googleapis.com"' - \
| yq eval '.spec.backup.storages.gcp-cs.gcs.prefix="ps"' - \
| yq eval '.spec.backup.storages.azure-blob.type="azure"' - \
| yq eval ".spec.backup.storages.azure-blob.verifyTLS=true" - \
| yq eval '.spec.backup.storages.azure-blob.azure.containerName="operator-testing"' - \
| yq eval '.spec.backup.storages.azure-blob.azure.credentialsSecret="azure-secret"' - \
| yq eval '.spec.backup.storages.azure-blob.azure.prefix="ps"' - \
| kubectl -n "${NAMESPACE}" apply -f -]
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | + source ../../functions
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ realpath ../../..
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | ++++ pwd
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/tests/demand-backup
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | ++ test_name=demand-backup
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/vars.sh
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | ++++ git rev-parse --abbrev-ref HEAD
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export GIT_BRANCH=PR-916
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ GIT_BRANCH=PR-916
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export VERSION=PR-916-a5bda550
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ VERSION=PR-916-a5bda550
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export PMM_SERVER_VERSION=1.4.3
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ PMM_SERVER_VERSION=1.4.3
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export CERT_MANAGER_VER=1.17.2
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ CERT_MANAGER_VER=1.17.2
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export MINIO_VER=5.4.0
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ MINIO_VER=5.4.0
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ export CHAOS_MESH_VER=2.7.2
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ CHAOS_MESH_VER=2.7.2
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | ++++ which gdate
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-916/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | ++++ which date
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ date=/usr/bin/date
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ oc get projects
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ :
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ kubectl get nodes
logger.go:42: 12:06:09 | demand-backup/2-create-cluster | +++ grep '^minikube'
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | ++ oc get projects
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + get_cr
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.mysql.clusterType="async"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + local name_suffix=
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval .spec.mysql.size=3 -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.endpointUrl="http://minio-service.kuttl-test-clear-eel:9000"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.type="azure"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval .spec.backup.storages.azure-blob.verifyTLS=true -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.credentialsSecret="azure-secret"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval .spec.orchestrator.size=3 -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval .spec.proxy.haproxy.size=3 -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.type="s3"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.region="us-east-1"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.endpointUrl="https://storage.googleapis.com"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval .spec.backup.storages.aws-s3.verifyTLS=true -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval .spec.orchestrator.enabled=true -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.bucket="operator-testing"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.prefix="ps"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.credentialsSecret="aws-s3-secret"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.region="us-east-1"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.prefix="ps"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.containerName="operator-testing"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.type="s3"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + kubectl -n kuttl-test-clear-eel apply -f -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.bucket="operator-testing"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.bucket="operator-testing"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval .spec.proxy.haproxy.enabled=true -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.sslSecretName="test-ssl"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.upgradeOptions.apply="disabled"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.mysql.clusterType="async"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.prefix="ps"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.credentialsSecret="gcp-cs-secret"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval .spec.backup.storages.gcp-cs.verifyTLS=true -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.type="gcs"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval .spec.orchestrator.enabled=true -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.secretsName="test-secrets"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | ++ printf '.spec.initImage="%s"' perconalab/percona-server-mysql-operator:PR-916-a5bda550
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.initImage="perconalab/percona-server-mysql-operator:PR-916-a5bda550"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + '[' -n '' ']'
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.credentialsSecret="minio-secret"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | ++ printf '.spec.proxy.haproxy.image="%s"' perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:3-dev-latest
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.spec.pmm.image="perconalab/pmm-client:3-dev-latest"' -
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | ++ printf '.metadata.name="%s"' demand-backup
logger.go:42: 12:06:10 | demand-backup/2-create-cluster | + yq eval '.metadata.name="demand-backup"' /mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy/cr.yaml
logger.go:42: 12:06:11 | demand-backup/2-create-cluster | perconaservermysql.ps.percona.com/demand-backup created
logger.go:42: 12:09:22 | demand-backup/2-create-cluster | test step completed 2-create-cluster
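
[editor's note] From the xtrace, get_cr itself stamps the test name, secrets, and image tags into deploy/cr.yaml before the step-specific overrides above are layered on. Roughly (a sketch reconstructed from the trace; the real function also handles an optional name suffix and further conditionals):

    get_cr() {
        # Start from the stock CR and stamp in per-run values (values as traced).
        yq eval '.metadata.name="demand-backup"' /mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy/cr.yaml \
          | yq eval '.spec.secretsName="test-secrets"' - \
          | yq eval '.spec.sslSecretName="test-ssl"' - \
          | yq eval '.spec.upgradeOptions.apply="disabled"' - \
          | yq eval '.spec.initImage="perconalab/percona-server-mysql-operator:PR-916-a5bda550"' - \
          | yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql"' - \
          | yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup"' - \
          | yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' - \
          | yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' - \
          | yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router"' - \
          | yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' - \
          | yq eval '.spec.pmm.image="perconalab/pmm-client:3-dev-latest"' -
    }
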
logger.go:42: 12:09:22 | demand-backup/3-write-data | starting test step 3-write-data
logger.go:42: 12:09:22 | demand-backup/3-write-data | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
run_mysql \
"CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" \
"-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"
run_mysql \
"INSERT myDB.myTable (id) VALUES (100500)" \
"-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"]
logger.go:42: 12:09:22 | demand-backup/3-write-data | + source ../../functions
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ realpath ../../..
logger.go:42: 12:09:22 | demand-backup/3-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916
logger.go:42: 12:09:22 | demand-backup/3-write-data | ++++ pwd
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/tests/demand-backup
logger.go:42: 12:09:22 | demand-backup/3-write-data | ++ test_name=demand-backup
logger.go:42: 12:09:22 | demand-backup/3-write-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/vars.sh
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup
logger.go:42: 12:09:22 | demand-backup/3-write-data | ++++ git rev-parse --abbrev-ref HEAD
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export GIT_BRANCH=PR-916
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ GIT_BRANCH=PR-916
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export VERSION=PR-916-a5bda550
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ VERSION=PR-916-a5bda550
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export PMM_SERVER_VERSION=1.4.3
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ PMM_SERVER_VERSION=1.4.3
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export CERT_MANAGER_VER=1.17.2
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ CERT_MANAGER_VER=1.17.2
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export MINIO_VER=5.4.0
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ MINIO_VER=5.4.0
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ export CHAOS_MESH_VER=2.7.2
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ CHAOS_MESH_VER=2.7.2
logger.go:42: 12:09:22 | demand-backup/3-write-data | ++++ which gdate
logger.go:42: 12:09:22 | demand-backup/3-write-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-916/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
logger.go:42: 12:09:22 | demand-backup/3-write-data | ++++ which date
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ date=/usr/bin/date
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ oc get projects
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ :
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ kubectl get nodes
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ grep '^minikube'
logger.go:42: 12:09:22 | demand-backup/3-write-data | ++ oc get projects
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ get_cluster_name
logger.go:42: 12:09:22 | demand-backup/3-write-data | +++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 12:09:23 | demand-backup/3-write-data | ++ get_haproxy_svc demand-backup
logger.go:42: 12:09:23 | demand-backup/3-write-data | ++ local cluster=demand-backup
logger.go:42: 12:09:23 | demand-backup/3-write-data | ++ echo demand-backup-haproxy
logger.go:42: 12:09:23 | demand-backup/3-write-data | + run_mysql 'CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)' '-h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 12:09:23 | demand-backup/3-write-data | + local 'command=CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)'
logger.go:42: 12:09:23 | demand-backup/3-write-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 12:09:23 | demand-backup/3-write-data | + local pod=
logger.go:42: 12:09:23 | demand-backup/3-write-data | ++ get_client_pod
logger.go:42: 12:09:23 | demand-backup/3-write-data | ++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 12:09:23 | demand-backup/3-write-data | + client_pod=mysql-client
logger.go:42: 12:09:23 | demand-backup/3-write-data | + wait_pod mysql-client
logger.go:42: 12:09:23 | demand-backup/3-write-data | + local pod=mysql-client
logger.go:42: 12:09:23 | demand-backup/3-write-data | + set +o xtrace
logger.go:42: 12:09:23 | demand-backup/3-write-data | mysql-clienttrue
logger.go:42: 12:09:23 | demand-backup/3-write-data | + kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" | mysql -sN -h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 12:09:23 | demand-backup/3-write-data | + sed -e 's/mysql: //'
logger.go:42: 12:09:23 | demand-backup/3-write-data | + grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 12:09:24 | demand-backup/3-write-data | + :
logger.go:42: 12:09:24 | demand-backup/3-write-data | +++ get_cluster_name
logger.go:42: 12:09:24 | demand-backup/3-write-data | +++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 12:09:25 | demand-backup/3-write-data | ++ get_haproxy_svc demand-backup
logger.go:42: 12:09:25 | demand-backup/3-write-data | ++ local cluster=demand-backup
logger.go:42: 12:09:25 | demand-backup/3-write-data | ++ echo demand-backup-haproxy
logger.go:42: 12:09:25 | demand-backup/3-write-data | + run_mysql 'INSERT myDB.myTable (id) VALUES (100500)' '-h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 12:09:25 | demand-backup/3-write-data | + local 'command=INSERT myDB.myTable (id) VALUES (100500)'
logger.go:42: 12:09:25 | demand-backup/3-write-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 12:09:25 | demand-backup/3-write-data | + local pod=
logger.go:42: 12:09:25 | demand-backup/3-write-data | ++ get_client_pod
logger.go:42: 12:09:25 | demand-backup/3-write-data | ++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 12:09:25 | demand-backup/3-write-data | + client_pod=mysql-client
logger.go:42: 12:09:25 | demand-backup/3-write-data | + wait_pod mysql-client
logger.go:42: 12:09:25 | demand-backup/3-write-data | + local pod=mysql-client
logger.go:42: 12:09:25 | demand-backup/3-write-data | + set +o xtrace
logger.go:42: 12:09:26 | demand-backup/3-write-data | mysql-clienttrue
logger.go:42: 12:09:26 | demand-backup/3-write-data | + sed -e 's/mysql: //'
logger.go:42: 12:09:26 | demand-backup/3-write-data | + grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 12:09:26 | demand-backup/3-write-data | + kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "INSERT myDB.myTable (id) VALUES (100500)" | mysql -sN -h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 12:09:27 | demand-backup/3-write-data | + :
logger.go:42: 12:09:27 | demand-backup/3-write-data | test step completed 3-write-data
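
[editor's note] The run_mysql calls traced above execute statements through the long-lived mysql-client pod. A simplified sketch reconstructed from the trace (wait_pod and error handling are elided):

    # Pipe a statement into the mysql client inside the mysql-client pod and
    # strip the client's password warning from the output.
    run_mysql() {
        local command=$1
        local uri=$2
        local pod
        pod=$(kubectl -n "$NAMESPACE" get pods --selector=name=mysql-client \
            -o 'jsonpath={.items[].metadata.name}')
        kubectl -n "$NAMESPACE" exec "$pod" -- \
            bash -c "printf '%s\n' \"$command\" | mysql -sN $uri" \
          | sed -e 's/mysql: //' \
          | grep -v 'Using a password on the command line interface can be insecure.'
    }
    # usage: run_mysql "INSERT myDB.myTable (id) VALUES (100500)" "-h demand-backup-haproxy -uroot -proot_password"
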
demand-backup/4-move-primary-before-backup | +++ GIT_BRANCH=PR-916 logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ export VERSION=PR-916-a5bda550 logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ VERSION=PR-916-a5bda550 logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ export CERT_MANAGER_VER=1.17.2 logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ CERT_MANAGER_VER=1.17.2 logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ export MINIO_VER=5.4.0 logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ MINIO_VER=5.4.0 logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 12:09:27 | 
demand-backup/4-move-primary-before-backup | +++ CHAOS_MESH_VER=2.7.2
logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | ++++ which gdate
logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-916/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | ++++ which date
logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ date=/usr/bin/date
logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ oc get projects
logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ :
logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ kubectl get nodes
logger.go:42: 12:09:27 | demand-backup/4-move-primary-before-backup | +++ grep '^minikube'
logger.go:42: 12:09:28 | demand-backup/4-move-primary-before-backup | ++ oc get projects
logger.go:42: 12:09:28 | demand-backup/4-move-primary-before-backup | ++ get_primary_from_label
logger.go:42: 12:09:28 | demand-backup/4-move-primary-before-backup | ++ kubectl -n kuttl-test-clear-eel get pods -l mysql.percona.com/primary=true '-ojsonpath={.items[0].metadata.name}'
logger.go:42: 12:09:28 | demand-backup/4-move-primary-before-backup | + primary_pod_from_label=demand-backup-mysql-0
logger.go:42: 12:09:28 | demand-backup/4-move-primary-before-backup | + kubectl delete pod -n kuttl-test-clear-eel demand-backup-mysql-0
logger.go:42: 12:09:28 | demand-backup/4-move-primary-before-backup | pod "demand-backup-mysql-0" deleted
logger.go:42: 12:09:48 | demand-backup/4-move-primary-before-backup | + wait_cluster_consistency_async demand-backup 3 3
logger.go:42: 12:09:48 | demand-backup/4-move-primary-before-backup | + local cluster_name=demand-backup
logger.go:42: 12:09:48 | demand-backup/4-move-primary-before-backup | + local cluster_size=3
logger.go:42: 12:09:48 | demand-backup/4-move-primary-before-backup | + local orc_size=3
logger.go:42: 12:09:48 | demand-backup/4-move-primary-before-backup | + '[' -z 3 ']'
logger.go:42: 12:09:48 | demand-backup/4-move-primary-before-backup | + sleep 7
logger.go:42: 12:09:55 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-clear-eel -o 'jsonpath={.status.mysql.state}'
logger.go:42: 12:09:56 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
logger.go:42: 12:09:56 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)'
logger.go:42: 12:09:56 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async)
logger.go:42: 12:09:56 | demand-backup/4-move-primary-before-backup | + sleep 15
logger.go:42: 12:10:11 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-clear-eel -o 'jsonpath={.status.mysql.state}'
logger.go:42: 12:10:11 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
logger.go:42: 12:10:11 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)'
logger.go:42: 12:10:11 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async)
logger.go:42: 12:10:11 | demand-backup/4-move-primary-before-backup | + sleep 15
logger.go:42: 12:10:26 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-clear-eel -o 'jsonpath={.status.mysql.state}'
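This step resolves the current primary through the mysql.percona.com/primary=true pod label, deletes that pod to force a failover, and then polls the PerconaServerMySQL status until the cluster settles; the polling continues below until every status field reports ready, after which the step fails if the old and new primary names match. A condensed sketch of that wait pattern, assuming the same cluster name and namespace as this run (the retry cap is an addition for illustration; the traced wait_cluster_consistency_async loop shows no visible limit):

    # Poll the ps resource status the way the traced loop does.
    wait_ready() {
        local cluster=$1 ns=$2 tries=0
        until [[ "$(kubectl get ps "$cluster" -n "$ns" -o 'jsonpath={.status.mysql.state}')" == "ready" ]]; do
            echo "waiting for cluster readyness (async)"
            sleep 15
            if (( ++tries > 40 )); then echo "gave up waiting for $cluster"; return 1; fi
        done
        # the traced function goes on to check .status.mysql.ready, .status.orchestrator.ready,
        # .status.orchestrator.state and .status.state with the same jsonpath pattern
    }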
logger.go:42: 12:10:27 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
logger.go:42: 12:10:27 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)'
logger.go:42: 12:10:27 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async)
logger.go:42: 12:10:27 | demand-backup/4-move-primary-before-backup | + sleep 15
logger.go:42: 12:10:42 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-clear-eel -o 'jsonpath={.status.mysql.state}'
logger.go:42: 12:10:42 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
logger.go:42: 12:10:42 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)'
logger.go:42: 12:10:42 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async)
logger.go:42: 12:10:42 | demand-backup/4-move-primary-before-backup | + sleep 15
logger.go:42: 12:10:57 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-clear-eel -o 'jsonpath={.status.mysql.state}'
logger.go:42: 12:10:57 | demand-backup/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]]
logger.go:42: 12:10:57 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-clear-eel -o 'jsonpath={.status.mysql.ready}'
logger.go:42: 12:10:58 | demand-backup/4-move-primary-before-backup | + [[ 3 == \3 ]]
logger.go:42: 12:10:58 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-clear-eel -o 'jsonpath={.status.orchestrator.ready}'
logger.go:42: 12:10:58 | demand-backup/4-move-primary-before-backup | + [[ 3 == \3 ]]
logger.go:42: 12:10:58 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-clear-eel -o 'jsonpath={.status.orchestrator.state}'
logger.go:42: 12:10:59 | demand-backup/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]]
logger.go:42: 12:10:59 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-clear-eel -o 'jsonpath={.status.state}'
logger.go:42: 12:10:59 | demand-backup/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]]
logger.go:42: 12:10:59 | demand-backup/4-move-primary-before-backup | ++ get_primary_from_label
logger.go:42: 12:10:59 | demand-backup/4-move-primary-before-backup | ++ kubectl -n kuttl-test-clear-eel get pods -l mysql.percona.com/primary=true '-ojsonpath={.items[0].metadata.name}'
logger.go:42: 12:11:00 | demand-backup/4-move-primary-before-backup | + new_primary_pod_from_label=demand-backup-mysql-2
logger.go:42: 12:11:00 | demand-backup/4-move-primary-before-backup | + '[' demand-backup-mysql-0 == demand-backup-mysql-2 ']'
logger.go:42: 12:11:00 | demand-backup/4-move-primary-before-backup | test step completed 4-move-primary-before-backup
logger.go:42: 12:11:00 | demand-backup/5-create-backup-minio | starting test step 5-create-backup-minio
logger.go:42: 12:11:00 | demand-backup/5-create-backup-minio | PerconaServerMySQLBackup:kuttl-test-clear-eel/demand-backup-minio created
logger.go:42: 12:11:11 | demand-backup/5-create-backup-minio | test step completed 5-create-backup-minio
logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | starting test step 6-check-password-leak
logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | running command: [sh -c set -o errexit set -o xtrace source ../../functions check_passwords_leak]
logger.go:42: 12:11:11 |
demand-backup/6-check-password-leak | + source ../../functions logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ realpath ../../.. logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | ++++ pwd logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/tests/demand-backup logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | ++ test_name=demand-backup logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/vars.sh logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export GIT_BRANCH=PR-916 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ GIT_BRANCH=PR-916 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export VERSION=PR-916-a5bda550 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ VERSION=PR-916-a5bda550 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ 
IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export CERT_MANAGER_VER=1.17.2 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ CERT_MANAGER_VER=1.17.2 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export MINIO_VER=5.4.0 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ MINIO_VER=5.4.0 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | ++++ which gdate logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-916/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | ++++ which date logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ date=/usr/bin/date logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ oc get projects logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ : logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ kubectl get nodes logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | +++ grep '^minikube' logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | ++ oc get projects logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | + check_passwords_leak logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | + local secrets logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | + local passwords logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | + local pods logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | ++ kubectl get secrets -o json logger.go:42: 12:11:11 | demand-backup/6-check-password-leak | ++ jq -r '.items[].data | to_entries | .[] | select(.key | (endswith(".crt") or endswith(".key") or 
endswith(".pub") or endswith(".pem") or endswith(".p12") or test("namespace")) | not) | .value' logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | + secrets= logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | + passwords=' ' logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pods -o name logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | ++ awk -F / '{print $2}' logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | + pods='demand-backup-haproxy-0 logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | demand-backup-haproxy-1 logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | demand-backup-haproxy-2 logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | demand-backup-mysql-0 logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | demand-backup-mysql-1 logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | demand-backup-mysql-2 logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | demand-backup-orc-0 logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | demand-backup-orc-1 logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | demand-backup-orc-2 logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | minio-service-86dfccd949-b46pp logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | mysql-client logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | xb-demand-backup-minio-minio-d7xk7' logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | + collect_logs kuttl-test-clear-eel logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | + local containers logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | + local count logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | + NS=kuttl-test-clear-eel logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:11:12 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-haproxy-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:11:13 | demand-backup/6-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 12:11:13 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:13 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-haproxy-0 -c haproxy logger.go:42: 12:11:13 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-haproxy.txt logger.go:42: 12:11:13 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-haproxy.txt logger.go:42: 12:11:13 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:13 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-haproxy-0 -c mysql-monit logger.go:42: 12:11:14 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-mysql-monit.txt logger.go:42: 12:11:14 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-mysql-monit.txt logger.go:42: 12:11:14 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:11:14 | demand-backup/6-check-password-leak | logger.go:42: 12:11:14 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:11:14 | 
demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-haproxy-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:11:15 | demand-backup/6-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 12:11:15 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:15 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-haproxy-1 -c haproxy logger.go:42: 12:11:15 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-haproxy.txt logger.go:42: 12:11:15 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-haproxy.txt logger.go:42: 12:11:15 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:15 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-haproxy-1 -c mysql-monit logger.go:42: 12:11:16 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-mysql-monit.txt logger.go:42: 12:11:16 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-mysql-monit.txt logger.go:42: 12:11:16 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:11:16 | demand-backup/6-check-password-leak | logger.go:42: 12:11:16 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:11:16 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-haproxy-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:11:16 | demand-backup/6-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 12:11:16 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:16 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-haproxy-2 -c haproxy logger.go:42: 12:11:17 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-haproxy.txt logger.go:42: 12:11:17 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-haproxy.txt logger.go:42: 12:11:17 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:17 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-haproxy-2 -c mysql-monit logger.go:42: 12:11:18 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-mysql-monit.txt logger.go:42: 12:11:18 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-mysql-monit.txt logger.go:42: 12:11:18 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:11:18 | demand-backup/6-check-password-leak | logger.go:42: 12:11:18 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:11:18 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-mysql-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:11:18 | demand-backup/6-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 12:11:18 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:18 | demand-backup/6-check-password-leak | 
+ kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-0 -c mysql logger.go:42: 12:11:19 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-mysql.txt logger.go:42: 12:11:19 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-mysql.txt logger.go:42: 12:11:19 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:19 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-0 -c xtrabackup logger.go:42: 12:11:20 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-xtrabackup.txt logger.go:42: 12:11:20 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-xtrabackup.txt logger.go:42: 12:11:20 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:20 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-0 -c pt-heartbeat logger.go:42: 12:11:20 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-pt-heartbeat.txt logger.go:42: 12:11:20 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-pt-heartbeat.txt logger.go:42: 12:11:20 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:11:20 | demand-backup/6-check-password-leak | logger.go:42: 12:11:20 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:11:20 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-mysql-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:11:21 | demand-backup/6-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 12:11:21 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:21 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-1 -c mysql logger.go:42: 12:11:21 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-mysql.txt logger.go:42: 12:11:21 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-mysql.txt logger.go:42: 12:11:21 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:21 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-1 -c xtrabackup logger.go:42: 12:11:22 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-xtrabackup.txt logger.go:42: 12:11:22 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-xtrabackup.txt logger.go:42: 12:11:22 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:22 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-1 -c pt-heartbeat logger.go:42: 12:11:22 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-pt-heartbeat.txt logger.go:42: 12:11:22 | demand-backup/6-check-password-leak | logs saved in: 
/tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-pt-heartbeat.txt logger.go:42: 12:11:22 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:11:22 | demand-backup/6-check-password-leak | logger.go:42: 12:11:22 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:11:22 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-mysql-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:11:23 | demand-backup/6-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 12:11:23 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:23 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-2 -c mysql logger.go:42: 12:11:23 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-mysql.txt logger.go:42: 12:11:23 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-mysql.txt logger.go:42: 12:11:23 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:23 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-2 -c xtrabackup logger.go:42: 12:11:24 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-xtrabackup.txt logger.go:42: 12:11:24 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-xtrabackup.txt logger.go:42: 12:11:24 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:24 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-2 -c pt-heartbeat logger.go:42: 12:11:25 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-pt-heartbeat.txt logger.go:42: 12:11:25 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-pt-heartbeat.txt logger.go:42: 12:11:25 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:11:25 | demand-backup/6-check-password-leak | logger.go:42: 12:11:25 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:11:25 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-orc-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:11:25 | demand-backup/6-check-password-leak | + containers='orc mysql-monit' logger.go:42: 12:11:25 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:25 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-orc-0 -c orc logger.go:42: 12:11:27 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-orc.txt logger.go:42: 12:11:27 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-orc.txt logger.go:42: 12:11:27 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:27 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-orc-0 -c mysql-monit logger.go:42: 12:11:27 | demand-backup/6-check-password-leak | + echo logs saved in: 
/tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-mysql-monit.txt logger.go:42: 12:11:27 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-mysql-monit.txt logger.go:42: 12:11:27 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:11:27 | demand-backup/6-check-password-leak | logger.go:42: 12:11:27 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:11:27 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-orc-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:11:28 | demand-backup/6-check-password-leak | + containers='orc mysql-monit' logger.go:42: 12:11:28 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:28 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-orc-1 -c orc logger.go:42: 12:11:28 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-orc.txt logger.go:42: 12:11:28 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-orc.txt logger.go:42: 12:11:28 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:28 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-orc-1 -c mysql-monit logger.go:42: 12:11:29 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-mysql-monit.txt logger.go:42: 12:11:29 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-mysql-monit.txt logger.go:42: 12:11:29 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:11:29 | demand-backup/6-check-password-leak | logger.go:42: 12:11:29 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:11:29 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-orc-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:11:29 | demand-backup/6-check-password-leak | + containers='orc mysql-monit' logger.go:42: 12:11:29 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:29 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-orc-2 -c orc logger.go:42: 12:11:30 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-orc.txt logger.go:42: 12:11:30 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-orc.txt logger.go:42: 12:11:30 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:30 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-orc-2 -c mysql-monit logger.go:42: 12:11:31 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-mysql-monit.txt logger.go:42: 12:11:31 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-mysql-monit.txt logger.go:42: 12:11:31 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:11:31 | demand-backup/6-check-password-leak | logger.go:42: 12:11:31 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:11:31 | 
demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod minio-service-86dfccd949-b46pp -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:11:31 | demand-backup/6-check-password-leak | + containers=minio logger.go:42: 12:11:31 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:31 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs minio-service-86dfccd949-b46pp -c minio logger.go:42: 12:11:32 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-minio-service-86dfccd949-b46pp-minio.txt logger.go:42: 12:11:32 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-minio-service-86dfccd949-b46pp-minio.txt logger.go:42: 12:11:32 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:11:32 | demand-backup/6-check-password-leak | logger.go:42: 12:11:32 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:11:32 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod mysql-client -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:11:32 | demand-backup/6-check-password-leak | + containers=mysql-client logger.go:42: 12:11:32 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:32 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs mysql-client -c mysql-client logger.go:42: 12:11:33 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-mysql-client-mysql-client.txt logger.go:42: 12:11:33 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-mysql-client-mysql-client.txt logger.go:42: 12:11:33 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:11:33 | demand-backup/6-check-password-leak | logger.go:42: 12:11:33 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:11:33 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod xb-demand-backup-minio-minio-d7xk7 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:11:33 | demand-backup/6-check-password-leak | + containers=xtrabackup logger.go:42: 12:11:33 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:33 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-clear-eel logs xb-demand-backup-minio-minio-d7xk7 -c xtrabackup logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-demand-backup-minio-minio-d7xk7-xtrabackup.txt logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-demand-backup-minio-minio-d7xk7-xtrabackup.txt logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | + '[' -n ps-operator ']' logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | ++ kubectl -n ps-operator get pods -o name logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | ++ awk -F / '{print $2}' logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | + pods=percona-server-mysql-operator-5c45849dc9-px6h9 logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | + collect_logs ps-operator logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | + local containers 
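check_passwords_leak drives the same collect_logs routine once per namespace (the test namespace above, the ps-operator namespace here): list the pods, read each pod's container names from its spec, and dump every container log under the temp dir. A minimal sketch of that routine, matching the 'logs saved in:' lines in this trace (the final grep over $passwords is inferred from the function's purpose and is an assumption; in this run the secret extraction returned nothing, so there was nothing to scan):

    collect_logs() {
        local ns=$1
        for p in $(kubectl -n "$ns" get pods -o name | awk -F / '{print $2}'); do
            for c in $(kubectl -n "$ns" get pod "$p" -o 'jsonpath={.spec.containers[*].name}'); do
                local out="/tmp/kuttl/ps/demand-backup/logs_output-${p}-${c}.txt"
                kubectl -n "$ns" logs "$p" -c "$c" > "$out"
                echo "logs saved in: $out"
                for pass in $passwords; do   # assumed leak scan, not visible in this trace
                    grep -F -- "$pass" "$out" && echo "password leak found in $out"
                done
            done
        done
    }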
logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | + local count logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | + NS=ps-operator logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | ++ kubectl -n ps-operator get pod percona-server-mysql-operator-5c45849dc9-px6h9 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | + containers=manager logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:11:34 | demand-backup/6-check-password-leak | + kubectl -n ps-operator logs percona-server-mysql-operator-5c45849dc9-px6h9 -c manager logger.go:42: 12:11:35 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-percona-server-mysql-operator-5c45849dc9-px6h9-manager.txt logger.go:42: 12:11:35 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-percona-server-mysql-operator-5c45849dc9-px6h9-manager.txt logger.go:42: 12:11:35 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:11:35 | demand-backup/6-check-password-leak | logger.go:42: 12:11:35 | demand-backup/6-check-password-leak | test step completed 6-check-password-leak logger.go:42: 12:11:35 | demand-backup/7-delete-data | starting test step 7-delete-data logger.go:42: 12:11:35 | demand-backup/7-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 04-delete-data-minio-${i} --from-literal=data="${data}" done] logger.go:42: 12:11:35 | demand-backup/7-delete-data | + source ../../functions logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ realpath ../../.. 
logger.go:42: 12:11:35 | demand-backup/7-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:11:35 | demand-backup/7-delete-data | ++++ pwd logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/tests/demand-backup logger.go:42: 12:11:35 | demand-backup/7-delete-data | ++ test_name=demand-backup logger.go:42: 12:11:35 | demand-backup/7-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/vars.sh logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:11:35 | demand-backup/7-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export GIT_BRANCH=PR-916 logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ GIT_BRANCH=PR-916 logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export VERSION=PR-916-a5bda550 logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ VERSION=PR-916-a5bda550 logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:11:35 | 
demand-backup/7-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export CERT_MANAGER_VER=1.17.2 logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ CERT_MANAGER_VER=1.17.2 logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export MINIO_VER=5.4.0 logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ MINIO_VER=5.4.0 logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 12:11:35 | demand-backup/7-delete-data | ++++ which gdate logger.go:42: 12:11:35 | demand-backup/7-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-916/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:11:35 | demand-backup/7-delete-data | ++++ which date logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ date=/usr/bin/date logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ oc get projects logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ : logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ kubectl get nodes logger.go:42: 12:11:35 | demand-backup/7-delete-data | +++ grep '^minikube' logger.go:42: 12:11:36 | demand-backup/7-delete-data | ++ oc get projects logger.go:42: 12:11:36 | demand-backup/7-delete-data | +++ get_cluster_name logger.go:42: 12:11:36 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:11:36 | demand-backup/7-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 12:11:36 | demand-backup/7-delete-data | ++ local cluster=demand-backup logger.go:42: 12:11:36 | demand-backup/7-delete-data | ++ echo demand-backup-haproxy logger.go:42: 12:11:36 | demand-backup/7-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:11:36 | demand-backup/7-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 12:11:36 | demand-backup/7-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:11:36 | demand-backup/7-delete-data | + local pod= logger.go:42: 12:11:36 | demand-backup/7-delete-data | ++ get_client_pod logger.go:42: 12:11:36 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 
12:11:36 | demand-backup/7-delete-data | + client_pod=mysql-client logger.go:42: 12:11:36 | demand-backup/7-delete-data | + wait_pod mysql-client logger.go:42: 12:11:36 | demand-backup/7-delete-data | + local pod=mysql-client logger.go:42: 12:11:36 | demand-backup/7-delete-data | + set +o xtrace logger.go:42: 12:11:37 | demand-backup/7-delete-data | mysql-clienttrue logger.go:42: 12:11:37 | demand-backup/7-delete-data | + kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:11:37 | demand-backup/7-delete-data | + sed -e 's/mysql: //' logger.go:42: 12:11:37 | demand-backup/7-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:11:38 | demand-backup/7-delete-data | + : logger.go:42: 12:11:38 | demand-backup/7-delete-data | ++ get_cluster_name logger.go:42: 12:11:38 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:11:38 | demand-backup/7-delete-data | + cluster_name=demand-backup logger.go:42: 12:11:38 | demand-backup/7-delete-data | + for i in 0 1 2 logger.go:42: 12:11:38 | demand-backup/7-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:11:38 | demand-backup/7-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:11:38 | demand-backup/7-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:11:38 | demand-backup/7-delete-data | ++ local pod= logger.go:42: 12:11:38 | demand-backup/7-delete-data | +++ get_client_pod logger.go:42: 12:11:38 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:11:38 | demand-backup/7-delete-data | ++ client_pod=mysql-client logger.go:42: 12:11:38 | demand-backup/7-delete-data | ++ wait_pod mysql-client logger.go:42: 12:11:38 | demand-backup/7-delete-data | ++ local pod=mysql-client logger.go:42: 12:11:38 | demand-backup/7-delete-data | ++ set +o xtrace logger.go:42: 12:11:39 | demand-backup/7-delete-data | mysql-clienttrue logger.go:42: 12:11:39 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:11:39 | demand-backup/7-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:11:39 | demand-backup/7-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
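Every SQL statement in these steps goes through the run_mysql helper replayed above: it resolves the long-lived mysql-client pod by label, waits for it, and pipes the statement into mysql over kubectl exec, stripping the client's password warning from the output. A condensed sketch of the same pattern (wait_pod and error handling trimmed relative to the traced function):

    run_mysql() {
        local command=$1 uri=$2
        local client_pod
        client_pod=$(kubectl -n "$NAMESPACE" get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}')
        kubectl -n "$NAMESPACE" exec "$client_pod" -- \
            bash -c "printf '%s\n' \"$command\" | mysql -sN $uri" \
            | sed -e 's/mysql: //' \
            | grep -v 'Using a password on the command line interface can be insecure.'
    }

    # as replayed in this step:
    run_mysql "TRUNCATE TABLE myDB.myTable" "-h demand-backup-haproxy -uroot -proot_password"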
logger.go:42: 12:11:40 | demand-backup/7-delete-data | ++ :
logger.go:42: 12:11:40 | demand-backup/7-delete-data | + data=
logger.go:42: 12:11:40 | demand-backup/7-delete-data | + kubectl create configmap -n kuttl-test-clear-eel 04-delete-data-minio-0 --from-literal=data=
logger.go:42: 12:11:40 | demand-backup/7-delete-data | configmap/04-delete-data-minio-0 created
logger.go:42: 12:11:40 | demand-backup/7-delete-data | + for i in 0 1 2
logger.go:42: 12:11:40 | demand-backup/7-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password'
logger.go:42: 12:11:40 | demand-backup/7-delete-data | ++ local 'command=SELECT * FROM myDB.myTable'
logger.go:42: 12:11:40 | demand-backup/7-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password'
logger.go:42: 12:11:40 | demand-backup/7-delete-data | ++ local pod=
logger.go:42: 12:11:40 | demand-backup/7-delete-data | +++ get_client_pod
logger.go:42: 12:11:40 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 12:11:40 | demand-backup/7-delete-data | ++ client_pod=mysql-client
logger.go:42: 12:11:40 | demand-backup/7-delete-data | ++ wait_pod mysql-client
logger.go:42: 12:11:40 | demand-backup/7-delete-data | ++ local pod=mysql-client
logger.go:42: 12:11:40 | demand-backup/7-delete-data | ++ set +o xtrace
logger.go:42: 12:11:41 | demand-backup/7-delete-data | mysql-clienttrue
logger.go:42: 12:11:41 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password'
logger.go:42: 12:11:41 | demand-backup/7-delete-data | ++ sed -e 's/mysql: //'
logger.go:42: 12:11:41 | demand-backup/7-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
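Each replica's post-truncate query result is persisted as a ConfigMap (04-delete-data-minio-0 through -2) so that a later assert step can compare post-restore contents against this emptied state; an empty data= literal is exactly what the step expects after the TRUNCATE. The per-replica loop from the step script quoted earlier, restated on its own:

    cluster_name=$(get_cluster_name)   # resolves to demand-backup in this run
    for i in 0 1 2; do
        data=$(run_mysql "SELECT * FROM myDB.myTable" \
            "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password")
        kubectl create configmap -n "${NAMESPACE}" "04-delete-data-minio-${i}" --from-literal=data="${data}"
    done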
logger.go:42: 12:11:42 | demand-backup/7-delete-data | ++ :
logger.go:42: 12:11:42 | demand-backup/7-delete-data | + data=
logger.go:42: 12:11:42 | demand-backup/7-delete-data | + kubectl create configmap -n kuttl-test-clear-eel 04-delete-data-minio-1 --from-literal=data=
logger.go:42: 12:11:42 | demand-backup/7-delete-data | configmap/04-delete-data-minio-1 created
logger.go:42: 12:11:42 | demand-backup/7-delete-data | + for i in 0 1 2
logger.go:42: 12:11:42 | demand-backup/7-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password'
logger.go:42: 12:11:42 | demand-backup/7-delete-data | ++ local 'command=SELECT * FROM myDB.myTable'
logger.go:42: 12:11:42 | demand-backup/7-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password'
logger.go:42: 12:11:42 | demand-backup/7-delete-data | ++ local pod=
logger.go:42: 12:11:42 | demand-backup/7-delete-data | +++ get_client_pod
logger.go:42: 12:11:42 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 12:11:43 | demand-backup/7-delete-data | ++ client_pod=mysql-client
logger.go:42: 12:11:43 | demand-backup/7-delete-data | ++ wait_pod mysql-client
logger.go:42: 12:11:43 | demand-backup/7-delete-data | ++ local pod=mysql-client
logger.go:42: 12:11:43 | demand-backup/7-delete-data | ++ set +o xtrace
logger.go:42: 12:11:43 | demand-backup/7-delete-data | mysql-clienttrue
logger.go:42: 12:11:43 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password'
logger.go:42: 12:11:43 | demand-backup/7-delete-data | ++ sed -e 's/mysql: //'
logger.go:42: 12:11:43 | demand-backup/7-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 12:11:44 | demand-backup/7-delete-data | ++ :
logger.go:42: 12:11:44 | demand-backup/7-delete-data | + data=
logger.go:42: 12:11:44 | demand-backup/7-delete-data | + kubectl create configmap -n kuttl-test-clear-eel 04-delete-data-minio-2 --from-literal=data=
logger.go:42: 12:11:44 | demand-backup/7-delete-data | configmap/04-delete-data-minio-2 created
logger.go:42: 12:11:45 | demand-backup/7-delete-data | test step completed 7-delete-data
logger.go:42: 12:11:45 | demand-backup/8-restore-from-minio | starting test step 8-restore-from-minio
logger.go:42: 12:11:45 | demand-backup/8-restore-from-minio | PerconaServerMySQLRestore:kuttl-test-clear-eel/demand-backup-restore-minio created
logger.go:42: 12:16:57 | demand-backup/8-restore-from-minio | test step completed 8-restore-from-minio
logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | starting test step 9-check-password-leak
logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | running command: [sh -c set -o errexit set -o xtrace source ../../functions check_passwords_leak]
logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | + source ../../functions
logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ realpath ../../..
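Note the restore timing above: the PerconaServerMySQLRestore object is created at 12:11:45 and the step only completes at 12:16:57, so the harness blocks for roughly five minutes while the cluster is rebuilt from the minio backup. Step 9 then reruns the same leak check as step 6, now with the restore pod (xb-restore-demand-backup-restore-minio-clrjt) in scope. Its core is the jq filter visible in the trace: collect every secret value that is not a certificate, key, or namespace entry, then grep the saved logs for each. A sketch of that extraction (the jq filter is verbatim from the log; the base64 decode step is an assumption about how the values become grep-able plaintext):

    secrets=$(kubectl get secrets -o json | jq -r '.items[].data | to_entries | .[] |
        select(.key | (endswith(".crt") or endswith(".key") or endswith(".pub") or
            endswith(".pem") or endswith(".p12") or test("namespace")) | not) | .value')
    # assumed: .data values are base64-encoded and need decoding before the scan
    passwords=$(for s in $secrets; do printf '%s' "$s" | base64 -d; echo; done)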
logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | ++++ pwd logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/tests/demand-backup logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | ++ test_name=demand-backup logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/vars.sh logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export GIT_BRANCH=PR-916 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ GIT_BRANCH=PR-916 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export VERSION=PR-916-a5bda550 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ VERSION=PR-916-a5bda550 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export 
IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export CERT_MANAGER_VER=1.17.2 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ CERT_MANAGER_VER=1.17.2 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export MINIO_VER=5.4.0 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ MINIO_VER=5.4.0 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | ++++ which gdate logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-916/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | ++++ which date logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ date=/usr/bin/date logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ oc get projects logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ : logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ kubectl get nodes logger.go:42: 12:16:57 | demand-backup/9-check-password-leak | +++ grep '^minikube' logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | ++ oc get projects logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | + check_passwords_leak logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | + local secrets logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | + local passwords logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | + local pods logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | ++ kubectl get secrets -o json logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | ++ jq -r '.items[].data | to_entries | .[] | select(.key | (endswith(".crt") or endswith(".key") or endswith(".pub") or endswith(".pem") or endswith(".p12") or test("namespace")) | not) | .value' logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | + 
secrets= logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | + passwords=' ' logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pods -o name logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | ++ awk -F / '{print $2}' logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | + pods='demand-backup-haproxy-0 logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | demand-backup-haproxy-1 logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | demand-backup-haproxy-2 logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | demand-backup-mysql-0 logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | demand-backup-mysql-1 logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | demand-backup-mysql-2 logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | demand-backup-orc-0 logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | demand-backup-orc-1 logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | demand-backup-orc-2 logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | minio-service-86dfccd949-b46pp logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | mysql-client logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | xb-demand-backup-minio-minio-d7xk7 logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | xb-restore-demand-backup-restore-minio-clrjt' logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | + collect_logs kuttl-test-clear-eel logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | + local containers logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | + local count logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | + NS=kuttl-test-clear-eel logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:16:58 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-haproxy-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:16:59 | demand-backup/9-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 12:16:59 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:16:59 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-haproxy-0 -c haproxy logger.go:42: 12:17:00 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-haproxy.txt logger.go:42: 12:17:00 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-haproxy.txt logger.go:42: 12:17:00 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:00 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-haproxy-0 -c mysql-monit logger.go:42: 12:17:00 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-mysql-monit.txt logger.go:42: 12:17:00 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-mysql-monit.txt logger.go:42: 12:17:00 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:17:00 | demand-backup/9-check-password-leak | logger.go:42: 12:17:00 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:17:00 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod 
demand-backup-haproxy-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:17:01 | demand-backup/9-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 12:17:01 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:01 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-haproxy-1 -c haproxy logger.go:42: 12:17:01 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-haproxy.txt logger.go:42: 12:17:01 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-haproxy.txt logger.go:42: 12:17:01 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:01 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-haproxy-1 -c mysql-monit logger.go:42: 12:17:02 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-mysql-monit.txt logger.go:42: 12:17:02 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-mysql-monit.txt logger.go:42: 12:17:02 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:17:02 | demand-backup/9-check-password-leak | logger.go:42: 12:17:02 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:17:02 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-haproxy-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:17:02 | demand-backup/9-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 12:17:02 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:02 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-haproxy-2 -c haproxy logger.go:42: 12:17:03 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-haproxy.txt logger.go:42: 12:17:03 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-haproxy.txt logger.go:42: 12:17:03 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:03 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-haproxy-2 -c mysql-monit logger.go:42: 12:17:03 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-mysql-monit.txt logger.go:42: 12:17:03 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-mysql-monit.txt logger.go:42: 12:17:03 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:17:03 | demand-backup/9-check-password-leak | logger.go:42: 12:17:03 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:17:03 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-mysql-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:17:04 | demand-backup/9-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 12:17:04 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:04 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-0 -c mysql 
logger.go:42: 12:17:05 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-mysql.txt logger.go:42: 12:17:05 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-mysql.txt logger.go:42: 12:17:05 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:05 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-0 -c xtrabackup logger.go:42: 12:17:05 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-xtrabackup.txt logger.go:42: 12:17:05 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-xtrabackup.txt logger.go:42: 12:17:05 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:05 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-0 -c pt-heartbeat logger.go:42: 12:17:06 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-pt-heartbeat.txt logger.go:42: 12:17:06 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-pt-heartbeat.txt logger.go:42: 12:17:06 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:17:06 | demand-backup/9-check-password-leak | logger.go:42: 12:17:06 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:17:06 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-mysql-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:17:06 | demand-backup/9-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 12:17:06 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:06 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-1 -c mysql logger.go:42: 12:17:07 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-mysql.txt logger.go:42: 12:17:07 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-mysql.txt logger.go:42: 12:17:07 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:07 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-1 -c xtrabackup logger.go:42: 12:17:07 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-xtrabackup.txt logger.go:42: 12:17:07 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-xtrabackup.txt logger.go:42: 12:17:07 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:07 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-1 -c pt-heartbeat logger.go:42: 12:17:08 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-pt-heartbeat.txt logger.go:42: 12:17:08 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-pt-heartbeat.txt logger.go:42: 12:17:08 | 
demand-backup/9-check-password-leak | + echo logger.go:42: 12:17:08 | demand-backup/9-check-password-leak | logger.go:42: 12:17:08 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:17:08 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-mysql-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:17:08 | demand-backup/9-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 12:17:08 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:08 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-2 -c mysql logger.go:42: 12:17:09 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-mysql.txt logger.go:42: 12:17:09 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-mysql.txt logger.go:42: 12:17:09 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:09 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-2 -c xtrabackup logger.go:42: 12:17:10 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-xtrabackup.txt logger.go:42: 12:17:10 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-xtrabackup.txt logger.go:42: 12:17:10 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:10 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-2 -c pt-heartbeat logger.go:42: 12:17:10 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-pt-heartbeat.txt logger.go:42: 12:17:10 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-pt-heartbeat.txt logger.go:42: 12:17:10 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:17:10 | demand-backup/9-check-password-leak | logger.go:42: 12:17:10 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:17:10 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-orc-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:17:11 | demand-backup/9-check-password-leak | + containers='orc mysql-monit' logger.go:42: 12:17:11 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:11 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-orc-0 -c orc logger.go:42: 12:17:11 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-orc.txt logger.go:42: 12:17:11 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-orc.txt logger.go:42: 12:17:11 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:11 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-orc-0 -c mysql-monit logger.go:42: 12:17:12 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-mysql-monit.txt logger.go:42: 12:17:12 | demand-backup/9-check-password-leak | logs saved in: 
/tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-mysql-monit.txt logger.go:42: 12:17:12 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:17:12 | demand-backup/9-check-password-leak | logger.go:42: 12:17:12 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:17:12 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-orc-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:17:12 | demand-backup/9-check-password-leak | + containers='orc mysql-monit' logger.go:42: 12:17:12 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:12 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-orc-1 -c orc logger.go:42: 12:17:13 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-orc.txt logger.go:42: 12:17:13 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-orc.txt logger.go:42: 12:17:13 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:13 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-orc-1 -c mysql-monit logger.go:42: 12:17:14 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-mysql-monit.txt logger.go:42: 12:17:14 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-mysql-monit.txt logger.go:42: 12:17:14 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:17:14 | demand-backup/9-check-password-leak | logger.go:42: 12:17:14 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:17:14 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod demand-backup-orc-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:17:14 | demand-backup/9-check-password-leak | + containers='orc mysql-monit' logger.go:42: 12:17:14 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:14 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-orc-2 -c orc logger.go:42: 12:17:15 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-orc.txt logger.go:42: 12:17:15 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-orc.txt logger.go:42: 12:17:15 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:15 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs demand-backup-orc-2 -c mysql-monit logger.go:42: 12:17:16 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-mysql-monit.txt logger.go:42: 12:17:16 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-mysql-monit.txt logger.go:42: 12:17:16 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:17:16 | demand-backup/9-check-password-leak | logger.go:42: 12:17:16 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:17:16 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod minio-service-86dfccd949-b46pp -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 
12:17:16 | demand-backup/9-check-password-leak | + containers=minio logger.go:42: 12:17:16 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:16 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs minio-service-86dfccd949-b46pp -c minio logger.go:42: 12:17:16 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-minio-service-86dfccd949-b46pp-minio.txt logger.go:42: 12:17:16 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-minio-service-86dfccd949-b46pp-minio.txt logger.go:42: 12:17:16 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:17:16 | demand-backup/9-check-password-leak | logger.go:42: 12:17:16 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:17:16 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod mysql-client -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:17:17 | demand-backup/9-check-password-leak | + containers=mysql-client logger.go:42: 12:17:17 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:17 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs mysql-client -c mysql-client logger.go:42: 12:17:17 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-mysql-client-mysql-client.txt logger.go:42: 12:17:17 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-mysql-client-mysql-client.txt logger.go:42: 12:17:17 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:17:17 | demand-backup/9-check-password-leak | logger.go:42: 12:17:17 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:17:17 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod xb-demand-backup-minio-minio-d7xk7 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:17:18 | demand-backup/9-check-password-leak | + containers=xtrabackup logger.go:42: 12:17:18 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:18 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs xb-demand-backup-minio-minio-d7xk7 -c xtrabackup logger.go:42: 12:17:18 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-demand-backup-minio-minio-d7xk7-xtrabackup.txt logger.go:42: 12:17:18 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-demand-backup-minio-minio-d7xk7-xtrabackup.txt logger.go:42: 12:17:18 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:17:18 | demand-backup/9-check-password-leak | logger.go:42: 12:17:18 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:17:18 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-clear-eel get pod xb-restore-demand-backup-restore-minio-clrjt -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:17:19 | demand-backup/9-check-password-leak | + containers=xtrabackup logger.go:42: 12:17:19 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:17:19 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-clear-eel logs xb-restore-demand-backup-restore-minio-clrjt -c xtrabackup logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | + echo logs saved in: 
/tmp/kuttl/ps/demand-backup/logs_output-xb-restore-demand-backup-restore-minio-clrjt-xtrabackup.txt
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-restore-demand-backup-restore-minio-clrjt-xtrabackup.txt
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | + echo
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak |
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | + '[' -n ps-operator ']'
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | ++ kubectl -n ps-operator get pods -o name
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | ++ awk -F / '{print $2}'
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | + pods=percona-server-mysql-operator-5c45849dc9-px6h9
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | + collect_logs ps-operator
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | + local containers
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | + local count
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | + NS=ps-operator
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | + for p in '$pods'
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | ++ kubectl -n ps-operator get pod percona-server-mysql-operator-5c45849dc9-px6h9 -o 'jsonpath={.spec.containers[*].name}'
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | + containers=manager
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | + for c in '$containers'
logger.go:42: 12:17:20 | demand-backup/9-check-password-leak | + kubectl -n ps-operator logs percona-server-mysql-operator-5c45849dc9-px6h9 -c manager
logger.go:42: 12:17:21 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-percona-server-mysql-operator-5c45849dc9-px6h9-manager.txt
logger.go:42: 12:17:21 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-percona-server-mysql-operator-5c45849dc9-px6h9-manager.txt
logger.go:42: 12:17:21 | demand-backup/9-check-password-leak | + echo
logger.go:42: 12:17:21 | demand-backup/9-check-password-leak |
logger.go:42: 12:17:21 | demand-backup/9-check-password-leak | test step completed 9-check-password-leak
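Note: step 9 exercises the suite's check_passwords_leak and collect_logs helpers. Condensed, the logic is: decode every Secret value that is not a TLS artifact, dump every container's logs into TEMP_DIR, and flag the step if any decoded value appears in any log. A minimal sketch of that flow, with a hypothetical helper name and simplified error handling (the real implementations live in e2e-tests/functions, and as the xtrace above shows, the scan is repeated for the operator's own namespace):

    # Sketch only: decode candidate passwords, then grep all container logs.
    leak_check() {
        local ns="$1" pod c v
        local passwords=()
        while IFS= read -r v; do
            passwords+=("$(printf '%s' "$v" | base64 -d)")
        done < <(kubectl get secrets -o json | jq -r '
            .items[].data | to_entries | .[]
            | select(.key | (endswith(".crt") or endswith(".key") or endswith(".pub")
                             or endswith(".pem") or endswith(".p12") or test("namespace")) | not)
            | .value')
        for pod in $(kubectl -n "$ns" get pods -o name | awk -F/ '{print $2}'); do
            for c in $(kubectl -n "$ns" get pod "$pod" -o jsonpath='{.spec.containers[*].name}'); do
                kubectl -n "$ns" logs "$pod" -c "$c" > "/tmp/logs_output-$pod-$c.txt"
                for v in "${passwords[@]}"; do
                    [ -n "$v" ] || continue
                    grep -qF -- "$v" "/tmp/logs_output-$pod-$c.txt" \
                        && { echo "password leak: $pod/$c"; return 1; }
                done
            done
        done
    }

In this run the decoded candidate list came back empty (secrets= and passwords=' ' above), so the step effectively reduces to archiving every container's logs and completes successfully.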
logger.go:42: 12:17:21 | demand-backup/10-read-data | starting test step 10-read-data
logger.go:42: 12:17:21 | demand-backup/10-read-data | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
cluster_name=$(get_cluster_name)
for i in 0 1 2; do
data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password")
kubectl create configmap -n "${NAMESPACE}" 06-read-data-minio-${i} --from-literal=data="${data}"
done]
logger.go:42: 12:17:21 | demand-backup/10-read-data | + source ../../functions
[environment setup and host probes elided: identical to step 1-deploy-operator]
logger.go:42: 12:17:22 | demand-backup/10-read-data | ++ get_cluster_name
logger.go:42: 12:17:22 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 12:17:22 | demand-backup/10-read-data | + cluster_name=demand-backup
logger.go:42: 12:17:22 | demand-backup/10-read-data | + for i in 0 1 2
logger.go:42: 12:17:22 | demand-backup/10-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password'
logger.go:42: 12:17:22 | demand-backup/10-read-data | ++ local 'command=SELECT * FROM myDB.myTable'
logger.go:42: 12:17:22 | demand-backup/10-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password'
logger.go:42: 12:17:22 | demand-backup/10-read-data | ++ local pod=
logger.go:42: 12:17:22 | demand-backup/10-read-data | +++ get_client_pod
logger.go:42: 12:17:22 | demand-backup/10-read-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 12:17:22 | demand-backup/10-read-data | ++ client_pod=mysql-client
logger.go:42: 12:17:22 | demand-backup/10-read-data | ++ wait_pod
mysql-client logger.go:42: 12:17:22 | demand-backup/10-read-data | ++ local pod=mysql-client logger.go:42: 12:17:22 | demand-backup/10-read-data | ++ set +o xtrace logger.go:42: 12:17:23 | demand-backup/10-read-data | mysql-clienttrue logger.go:42: 12:17:23 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:17:23 | demand-backup/10-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:17:23 | demand-backup/10-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:17:24 | demand-backup/10-read-data | + data=100500 logger.go:42: 12:17:24 | demand-backup/10-read-data | + kubectl create configmap -n kuttl-test-clear-eel 06-read-data-minio-0 --from-literal=data=100500 logger.go:42: 12:17:24 | demand-backup/10-read-data | configmap/06-read-data-minio-0 created logger.go:42: 12:17:24 | demand-backup/10-read-data | + for i in 0 1 2 logger.go:42: 12:17:24 | demand-backup/10-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:17:24 | demand-backup/10-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:17:24 | demand-backup/10-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:17:24 | demand-backup/10-read-data | ++ local pod= logger.go:42: 12:17:24 | demand-backup/10-read-data | +++ get_client_pod logger.go:42: 12:17:24 | demand-backup/10-read-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:17:24 | demand-backup/10-read-data | ++ client_pod=mysql-client logger.go:42: 12:17:24 | demand-backup/10-read-data | ++ wait_pod mysql-client logger.go:42: 12:17:24 | demand-backup/10-read-data | ++ local pod=mysql-client logger.go:42: 12:17:24 | demand-backup/10-read-data | ++ set +o xtrace logger.go:42: 12:17:25 | demand-backup/10-read-data | mysql-clienttrue logger.go:42: 12:17:25 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:17:25 | demand-backup/10-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:17:25 | demand-backup/10-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 12:17:26 | demand-backup/10-read-data | + data=100500
logger.go:42: 12:17:26 | demand-backup/10-read-data | + kubectl create configmap -n kuttl-test-clear-eel 06-read-data-minio-1 --from-literal=data=100500
logger.go:42: 12:17:26 | demand-backup/10-read-data | configmap/06-read-data-minio-1 created
logger.go:42: 12:17:26 | demand-backup/10-read-data | + for i in 0 1 2
logger.go:42: 12:17:26 | demand-backup/10-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password'
logger.go:42: 12:17:26 | demand-backup/10-read-data | ++ local 'command=SELECT * FROM myDB.myTable'
logger.go:42: 12:17:26 | demand-backup/10-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password'
logger.go:42: 12:17:26 | demand-backup/10-read-data | ++ local pod=
logger.go:42: 12:17:26 | demand-backup/10-read-data | +++ get_client_pod
logger.go:42: 12:17:26 | demand-backup/10-read-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 12:17:27 | demand-backup/10-read-data | ++ client_pod=mysql-client
logger.go:42: 12:17:27 | demand-backup/10-read-data | ++ wait_pod mysql-client
logger.go:42: 12:17:27 | demand-backup/10-read-data | ++ local pod=mysql-client
logger.go:42: 12:17:27 | demand-backup/10-read-data | ++ set +o xtrace
logger.go:42: 12:17:27 | demand-backup/10-read-data | mysql-clienttrue
logger.go:42: 12:17:27 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password'
logger.go:42: 12:17:27 | demand-backup/10-read-data | ++ sed -e 's/mysql: //'
logger.go:42: 12:17:27 | demand-backup/10-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 12:17:28 | demand-backup/10-read-data | + data=100500
logger.go:42: 12:17:28 | demand-backup/10-read-data | + kubectl create configmap -n kuttl-test-clear-eel 06-read-data-minio-2 --from-literal=data=100500
logger.go:42: 12:17:28 | demand-backup/10-read-data | configmap/06-read-data-minio-2 created
logger.go:42: 12:17:29 | demand-backup/10-read-data | test step completed 10-read-data
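Note: every read in this step goes through the suite's run_mysql helper, which execs into the long-lived mysql-client pod instead of connecting from the test runner, then strips the mysql client's password warning so the configmap payload contains only the query result. A condensed sketch of the pattern visible in the xtrace above (simplified: no wait_pod readiness loop, hypothetical function name):

    # Sketch only: run one SQL statement via the in-cluster client pod.
    run_mysql_sketch() {
        local command="$1" uri="$2" ns="$3" client_pod
        client_pod=$(kubectl -n "$ns" get pods --selector=name=mysql-client \
            -o 'jsonpath={.items[].metadata.name}')
        kubectl -n "$ns" exec "$client_pod" -- \
            bash -c "printf '%s\n' \"$command\" | mysql -sN $uri" 2>&1 \
            | sed -e 's/mysql: //' \
            | grep -v 'Using a password on the command line interface can be insecure.'
    }

Querying each replica individually (demand-backup-mysql-0/1/2) rather than the haproxy service is the point of the step: all three members must return the same restored value (100500 here) for the data to count as fully replicated.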
logger.go:42: 12:17:29 | demand-backup/11-delete-data | starting test step 11-delete-data
logger.go:42: 12:17:29 | demand-backup/11-delete-data | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
run_mysql \
"TRUNCATE TABLE myDB.myTable" \
"-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"
cluster_name=$(get_cluster_name)
for i in 0 1 2; do
data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password")
kubectl create configmap -n "${NAMESPACE}" 04-delete-data-minio-backup-source-${i} --from-literal=data="${data}"
done]
logger.go:42: 12:17:29 | demand-backup/11-delete-data | + source ../../functions
[environment setup and host probes elided: identical to step 1-deploy-operator]
logger.go:42: 12:17:29 | demand-backup/11-delete-data | +++ get_cluster_name
logger.go:42: 12:17:29 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 12:17:30 | demand-backup/11-delete-data | ++ get_haproxy_svc demand-backup
logger.go:42: 12:17:30 | demand-backup/11-delete-data | ++ local cluster=demand-backup
logger.go:42: 12:17:30 | demand-backup/11-delete-data | ++ echo demand-backup-haproxy
logger.go:42: 12:17:30 | demand-backup/11-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 12:17:30 | demand-backup/11-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable'
logger.go:42: 12:17:30 | demand-backup/11-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password'
logger.go:42: 12:17:30 | demand-backup/11-delete-data | + local pod=
logger.go:42: 12:17:30 | demand-backup/11-delete-data | ++ get_client_pod
logger.go:42: 12:17:30 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o
'jsonpath={.items[].metadata.name}' logger.go:42: 12:17:30 | demand-backup/11-delete-data | + client_pod=mysql-client logger.go:42: 12:17:30 | demand-backup/11-delete-data | + wait_pod mysql-client logger.go:42: 12:17:30 | demand-backup/11-delete-data | + local pod=mysql-client logger.go:42: 12:17:30 | demand-backup/11-delete-data | + set +o xtrace logger.go:42: 12:17:31 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 12:17:31 | demand-backup/11-delete-data | + sed -e 's/mysql: //' logger.go:42: 12:17:31 | demand-backup/11-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:17:31 | demand-backup/11-delete-data | + kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:17:32 | demand-backup/11-delete-data | + : logger.go:42: 12:17:32 | demand-backup/11-delete-data | ++ get_cluster_name logger.go:42: 12:17:32 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:17:32 | demand-backup/11-delete-data | + cluster_name=demand-backup logger.go:42: 12:17:32 | demand-backup/11-delete-data | + for i in 0 1 2 logger.go:42: 12:17:32 | demand-backup/11-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:17:32 | demand-backup/11-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:17:32 | demand-backup/11-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:17:32 | demand-backup/11-delete-data | ++ local pod= logger.go:42: 12:17:32 | demand-backup/11-delete-data | +++ get_client_pod logger.go:42: 12:17:32 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:17:32 | demand-backup/11-delete-data | ++ client_pod=mysql-client logger.go:42: 12:17:32 | demand-backup/11-delete-data | ++ wait_pod mysql-client logger.go:42: 12:17:32 | demand-backup/11-delete-data | ++ local pod=mysql-client logger.go:42: 12:17:32 | demand-backup/11-delete-data | ++ set +o xtrace logger.go:42: 12:17:33 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 12:17:33 | demand-backup/11-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:17:33 | demand-backup/11-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 12:17:33 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:17:34 | demand-backup/11-delete-data | ++ : logger.go:42: 12:17:34 | demand-backup/11-delete-data | + data= logger.go:42: 12:17:34 | demand-backup/11-delete-data | + kubectl create configmap -n kuttl-test-clear-eel 04-delete-data-minio-backup-source-0 --from-literal=data= logger.go:42: 12:17:34 | demand-backup/11-delete-data | configmap/04-delete-data-minio-backup-source-0 created logger.go:42: 12:17:34 | demand-backup/11-delete-data | + for i in 0 1 2 logger.go:42: 12:17:34 | demand-backup/11-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:17:34 | demand-backup/11-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:17:34 | demand-backup/11-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:17:34 | demand-backup/11-delete-data | ++ local pod= logger.go:42: 12:17:34 | demand-backup/11-delete-data | +++ get_client_pod logger.go:42: 12:17:34 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:17:34 | demand-backup/11-delete-data | ++ client_pod=mysql-client logger.go:42: 12:17:34 | demand-backup/11-delete-data | ++ wait_pod mysql-client logger.go:42: 12:17:34 | demand-backup/11-delete-data | ++ local pod=mysql-client logger.go:42: 12:17:34 | demand-backup/11-delete-data | ++ set +o xtrace logger.go:42: 12:17:35 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 12:17:35 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:17:35 | demand-backup/11-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:17:35 | demand-backup/11-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 12:17:35 | demand-backup/11-delete-data | ++ : logger.go:42: 12:17:35 | demand-backup/11-delete-data | + data= logger.go:42: 12:17:35 | demand-backup/11-delete-data | + kubectl create configmap -n kuttl-test-clear-eel 04-delete-data-minio-backup-source-1 --from-literal=data= logger.go:42: 12:17:36 | demand-backup/11-delete-data | configmap/04-delete-data-minio-backup-source-1 created logger.go:42: 12:17:36 | demand-backup/11-delete-data | + for i in 0 1 2 logger.go:42: 12:17:36 | demand-backup/11-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:17:36 | demand-backup/11-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:17:36 | demand-backup/11-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:17:36 | demand-backup/11-delete-data | ++ local pod= logger.go:42: 12:17:36 | demand-backup/11-delete-data | +++ get_client_pod logger.go:42: 12:17:36 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:17:36 | demand-backup/11-delete-data | ++ client_pod=mysql-client logger.go:42: 12:17:36 | demand-backup/11-delete-data | ++ wait_pod mysql-client logger.go:42: 12:17:36 | demand-backup/11-delete-data | ++ local pod=mysql-client logger.go:42: 12:17:36 | demand-backup/11-delete-data | ++ set +o xtrace logger.go:42: 12:17:37 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 12:17:37 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:17:37 | demand-backup/11-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:17:37 | demand-backup/11-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 12:17:37 | demand-backup/11-delete-data | ++ :
logger.go:42: 12:17:37 | demand-backup/11-delete-data | + data=
logger.go:42: 12:17:37 | demand-backup/11-delete-data | + kubectl create configmap -n kuttl-test-clear-eel 04-delete-data-minio-backup-source-2 --from-literal=data=
logger.go:42: 12:17:38 | demand-backup/11-delete-data | configmap/04-delete-data-minio-backup-source-2 created
logger.go:42: 12:17:39 | demand-backup/11-delete-data | test step completed 11-delete-data
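Note: the empty data= values are the expected outcome, not a failure. The TRUNCATE was issued once against the demand-backup-haproxy service, and each per-replica SELECT then returned nothing, confirming the delete replicated to every member before the restore in step 12. The results are persisted as configmaps so kuttl can assert on them declaratively; a quick manual equivalent of that check (hypothetical, not part of the suite):

    # All three configmaps should hold an empty payload at this point.
    for i in 0 1 2; do
        kubectl -n kuttl-test-clear-eel get configmap \
            "04-delete-data-minio-backup-source-$i" -o jsonpath='{.data.data}'
    done   # prints nothing for all three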
logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | starting test step 12-restore-from-minio-backup-source
logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
storage_name="minio"
backup_name="demand-backup-minio"
restore_name="demand-backup-restore-minio-backup-source"
cluster_name="${test_name}${name_suffix:+-$name_suffix}"
destination=$(kubectl -n "${NAMESPACE}" get ps-backup "${backup_name}" -o jsonpath='{.status.destination}')
cat "${DEPLOY_DIR}/restore.yaml" \
| yq eval "$(printf '.metadata.name="%s"' "${restore_name}")" - \
| yq eval "$(printf '.spec.clusterName="%s"' "${cluster_name}")" - \
| yq eval "del(.spec.backupName)" - \
| yq eval "$(printf '.spec.backupSource.destination="%s"' "${destination}")" - \
| yq eval '.spec.backupSource.storage.type="s3"' - \
| yq eval '.spec.backupSource.storage.s3.bucket="operator-testing"' - \
| yq eval '.spec.backupSource.storage.s3.credentialsSecret="minio-secret"' - \
| yq eval "$(printf '.spec.backupSource.storage.s3.endpointUrl="http://minio-service.%s:9000"' "${NAMESPACE}")" - \
| yq eval '.spec.backupSource.storage.s3.region="us-east-1"' - \
| kubectl apply -n "${NAMESPACE}" -f -]
logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + source ../../functions
[environment setup and host probes elided: identical to step 1-deploy-operator]
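Note: because this restore is driven by spec.backupSource rather than spec.backupName, the step rewrites deploy/restore.yaml field by field, pulling the destination from the finished backup's status. Each yq eval runs as one stage of a single shell pipeline, which is why the xtrace that follows prints the stages in a jumbled order. The same edit expressed as one yq call (a sketch of the equivalent, not the suite's script):

    # Sketch only: build the PerconaServerMySQLRestore in a single yq call.
    destination=$(kubectl -n kuttl-test-clear-eel get ps-backup demand-backup-minio \
        -o jsonpath='{.status.destination}')
    yq eval "
        .metadata.name = \"demand-backup-restore-minio-backup-source\" |
        .spec.clusterName = \"demand-backup\" |
        del(.spec.backupName) |
        .spec.backupSource.destination = \"$destination\" |
        .spec.backupSource.storage.type = \"s3\" |
        .spec.backupSource.storage.s3.bucket = \"operator-testing\" |
        .spec.backupSource.storage.s3.credentialsSecret = \"minio-secret\" |
        .spec.backupSource.storage.s3.endpointUrl = \"http://minio-service.kuttl-test-clear-eel:9000\" |
        .spec.backupSource.storage.s3.region = \"us-east-1\"
    " deploy/restore.yaml | kubectl apply -n kuttl-test-clear-eel -f -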
demand-backup/12-restore-from-minio-backup-source | ++ oc get projects logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + storage_name=minio logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + backup_name=demand-backup-minio logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + restore_name=demand-backup-restore-minio-backup-source logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + cluster_name=demand-backup logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | ++ kubectl -n kuttl-test-clear-eel get ps-backup demand-backup-minio -o 'jsonpath={.status.destination}' logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + destination=s3://operator-testing/demand-backup-2025-05-27-12:11:00-full logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | ++ printf '.metadata.name="%s"' demand-backup-restore-minio-backup-source logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + yq eval 'del(.spec.backupName)' - logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.metadata.name="demand-backup-restore-minio-backup-source"' - logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.credentialsSecret="minio-secret"' - logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | ++ printf '.spec.clusterName="%s"' demand-backup logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.clusterName="demand-backup"' - logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + cat /mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy/restore.yaml logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.type="s3"' - logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.bucket="operator-testing"' - logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.region="us-east-1"' - logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | ++ printf '.spec.backupSource.destination="%s"' s3://operator-testing/demand-backup-2025-05-27-12:11:00-full logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.destination="s3://operator-testing/demand-backup-2025-05-27-12:11:00-full"' - logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + kubectl apply -n kuttl-test-clear-eel -f - logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | ++ printf '.spec.backupSource.storage.s3.endpointUrl="http://minio-service.%s:9000"' kuttl-test-clear-eel logger.go:42: 12:17:39 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.endpointUrl="http://minio-service.kuttl-test-clear-eel:9000"' - logger.go:42: 12:17:40 | demand-backup/12-restore-from-minio-backup-source | perconaservermysqlrestore.ps.percona.com/demand-backup-restore-minio-backup-source created logger.go:42: 12:22:39 | demand-backup/12-restore-from-minio-backup-source | test step completed 12-restore-from-minio-backup-source logger.go:42: 12:22:39 | demand-backup/13-read-data | starting test step 13-read-data logger.go:42: 12:22:39 | demand-backup/13-read-data | running command: 
[sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 09-read-data-minio-backup-source-${i} --from-literal=data="${data}" done] logger.go:42: 12:22:39 | demand-backup/13-read-data | + source ../../functions logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ realpath ../../.. logger.go:42: 12:22:39 | demand-backup/13-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:22:39 | demand-backup/13-read-data | ++++ pwd logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/tests/demand-backup logger.go:42: 12:22:39 | demand-backup/13-read-data | ++ test_name=demand-backup logger.go:42: 12:22:39 | demand-backup/13-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/vars.sh logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:22:39 | demand-backup/13-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export GIT_BRANCH=PR-916 logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ GIT_BRANCH=PR-916 logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export VERSION=PR-916-a5bda550 logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ VERSION=PR-916-a5bda550 logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export 
IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export CERT_MANAGER_VER=1.17.2 logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ CERT_MANAGER_VER=1.17.2 logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export MINIO_VER=5.4.0 logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ MINIO_VER=5.4.0 logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 12:22:39 | demand-backup/13-read-data | ++++ which gdate logger.go:42: 12:22:39 | demand-backup/13-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-916/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:22:39 | demand-backup/13-read-data | ++++ which date logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ date=/usr/bin/date logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ oc get projects logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ : logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ kubectl get nodes logger.go:42: 12:22:39 | demand-backup/13-read-data | +++ grep '^minikube' logger.go:42: 12:22:40 | demand-backup/13-read-data | ++ oc get projects logger.go:42: 12:22:40 | demand-backup/13-read-data | ++ get_cluster_name logger.go:42: 12:22:40 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:22:40 | demand-backup/13-read-data | + cluster_name=demand-backup logger.go:42: 12:22:40 | demand-backup/13-read-data | + for i in 0 1 2 logger.go:42: 12:22:40 | demand-backup/13-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:22:40 | demand-backup/13-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:22:40 | demand-backup/13-read-data | ++ local 
'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:22:40 | demand-backup/13-read-data | ++ local pod= logger.go:42: 12:22:40 | demand-backup/13-read-data | +++ get_client_pod logger.go:42: 12:22:40 | demand-backup/13-read-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:22:41 | demand-backup/13-read-data | ++ client_pod=mysql-client logger.go:42: 12:22:41 | demand-backup/13-read-data | ++ wait_pod mysql-client logger.go:42: 12:22:41 | demand-backup/13-read-data | ++ local pod=mysql-client logger.go:42: 12:22:41 | demand-backup/13-read-data | ++ set +o xtrace logger.go:42: 12:22:41 | demand-backup/13-read-data | mysql-clienttrue logger.go:42: 12:22:41 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:22:41 | demand-backup/13-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:22:41 | demand-backup/13-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:22:42 | demand-backup/13-read-data | + data=100500 logger.go:42: 12:22:42 | demand-backup/13-read-data | + kubectl create configmap -n kuttl-test-clear-eel 09-read-data-minio-backup-source-0 --from-literal=data=100500 logger.go:42: 12:22:43 | demand-backup/13-read-data | configmap/09-read-data-minio-backup-source-0 created logger.go:42: 12:22:43 | demand-backup/13-read-data | + for i in 0 1 2 logger.go:42: 12:22:43 | demand-backup/13-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:22:43 | demand-backup/13-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:22:43 | demand-backup/13-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:22:43 | demand-backup/13-read-data | ++ local pod= logger.go:42: 12:22:43 | demand-backup/13-read-data | +++ get_client_pod logger.go:42: 12:22:43 | demand-backup/13-read-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:22:43 | demand-backup/13-read-data | ++ client_pod=mysql-client logger.go:42: 12:22:43 | demand-backup/13-read-data | ++ wait_pod mysql-client logger.go:42: 12:22:43 | demand-backup/13-read-data | ++ local pod=mysql-client logger.go:42: 12:22:43 | demand-backup/13-read-data | ++ set +o xtrace logger.go:42: 12:22:43 | demand-backup/13-read-data | mysql-clienttrue logger.go:42: 12:22:43 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:22:43 | demand-backup/13-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:22:43 | demand-backup/13-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
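
The step-12 trace at 12:17:39 shows deploy/restore.yaml being piped through a chain of yq edits: del(.spec.backupName) drops the template's name-based reference, and the backupSource fields are filled in from the destination read off the ps-backup object. The manifest that kubectl apply received is, modulo untouched template fields, equivalent to this (only the group ps.percona.com is visible in the log, so the apiVersion below is an assumption):

    kubectl -n kuttl-test-clear-eel apply -f - <<'EOF'
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLRestore
    metadata:
      name: demand-backup-restore-minio-backup-source
    spec:
      clusterName: demand-backup
      backupSource:
        destination: s3://operator-testing/demand-backup-2025-05-27-12:11:00-full
        storage:
          type: s3
          s3:
            bucket: operator-testing
            region: us-east-1
            credentialsSecret: minio-secret
            endpointUrl: http://minio-service.kuttl-test-clear-eel:9000
    EOF
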
logger.go:42: 12:22:44 | demand-backup/13-read-data | + data=100500 logger.go:42: 12:22:44 | demand-backup/13-read-data | + kubectl create configmap -n kuttl-test-clear-eel 09-read-data-minio-backup-source-1 --from-literal=data=100500 logger.go:42: 12:22:45 | demand-backup/13-read-data | configmap/09-read-data-minio-backup-source-1 created logger.go:42: 12:22:45 | demand-backup/13-read-data | + for i in 0 1 2 logger.go:42: 12:22:45 | demand-backup/13-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:22:45 | demand-backup/13-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:22:45 | demand-backup/13-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:22:45 | demand-backup/13-read-data | ++ local pod= logger.go:42: 12:22:45 | demand-backup/13-read-data | +++ get_client_pod logger.go:42: 12:22:45 | demand-backup/13-read-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:22:45 | demand-backup/13-read-data | ++ client_pod=mysql-client logger.go:42: 12:22:45 | demand-backup/13-read-data | ++ wait_pod mysql-client logger.go:42: 12:22:45 | demand-backup/13-read-data | ++ local pod=mysql-client logger.go:42: 12:22:45 | demand-backup/13-read-data | ++ set +o xtrace logger.go:42: 12:22:45 | demand-backup/13-read-data | mysql-clienttrue logger.go:42: 12:22:45 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:22:45 | demand-backup/13-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:22:45 | demand-backup/13-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:22:46 | demand-backup/13-read-data | + data=100500 logger.go:42: 12:22:46 | demand-backup/13-read-data | + kubectl create configmap -n kuttl-test-clear-eel 09-read-data-minio-backup-source-2 --from-literal=data=100500 logger.go:42: 12:22:47 | demand-backup/13-read-data | configmap/09-read-data-minio-backup-source-2 created logger.go:42: 12:22:47 | demand-backup/13-read-data | test step completed 13-read-data logger.go:42: 12:22:47 | demand-backup/14-create-backup-s3 | starting test step 14-create-backup-s3 logger.go:42: 12:22:48 | demand-backup/14-create-backup-s3 | PerconaServerMySQLBackup:kuttl-test-clear-eel/demand-backup-s3 created logger.go:42: 12:22:58 | demand-backup/14-create-backup-s3 | test step completed 14-create-backup-s3 logger.go:42: 12:22:58 | demand-backup/15-delete-data | starting test step 15-delete-data logger.go:42: 12:22:58 | demand-backup/15-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 08-delete-data-s3-${i} --from-literal=data="${data}" done] logger.go:42: 12:22:58 | demand-backup/15-delete-data | + source ../../functions logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ realpath ../../.. 
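
Each delete-data step runs the same small script; reformatted from the "running command" blob above, it is easier to see that the TRUNCATE goes through the HAProxy service (so it hits the writable primary) while the verification SELECTs address each mysql pod individually through the headless service:

    run_mysql "TRUNCATE TABLE myDB.myTable" \
        "-h $(get_haproxy_svc "$(get_cluster_name)") -uroot -proot_password"
    cluster_name=$(get_cluster_name)
    for i in 0 1 2; do
        data=$(run_mysql "SELECT * FROM myDB.myTable" \
            "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password")
        kubectl create configmap -n "${NAMESPACE}" "08-delete-data-s3-${i}" \
            --from-literal=data="${data}"
    done
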
logger.go:42: 12:22:58 | demand-backup/15-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:22:58 | demand-backup/15-delete-data | ++++ pwd logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/tests/demand-backup logger.go:42: 12:22:58 | demand-backup/15-delete-data | ++ test_name=demand-backup logger.go:42: 12:22:58 | demand-backup/15-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/vars.sh logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:22:58 | demand-backup/15-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export GIT_BRANCH=PR-916 logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ GIT_BRANCH=PR-916 logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export VERSION=PR-916-a5bda550 logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ VERSION=PR-916-a5bda550 logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 
12:22:58 | demand-backup/15-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export CERT_MANAGER_VER=1.17.2 logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ CERT_MANAGER_VER=1.17.2 logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export MINIO_VER=5.4.0 logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ MINIO_VER=5.4.0 logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 12:22:58 | demand-backup/15-delete-data | ++++ which gdate logger.go:42: 12:22:58 | demand-backup/15-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-916/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:22:58 | demand-backup/15-delete-data | ++++ which date logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ date=/usr/bin/date logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ oc get projects logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ : logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ kubectl get nodes logger.go:42: 12:22:58 | demand-backup/15-delete-data | +++ grep '^minikube' logger.go:42: 12:22:59 | demand-backup/15-delete-data | ++ oc get projects logger.go:42: 12:22:59 | demand-backup/15-delete-data | +++ get_cluster_name logger.go:42: 12:22:59 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:22:59 | demand-backup/15-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 12:22:59 | demand-backup/15-delete-data | ++ local cluster=demand-backup logger.go:42: 12:22:59 | demand-backup/15-delete-data | ++ echo demand-backup-haproxy logger.go:42: 12:22:59 | demand-backup/15-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:22:59 | demand-backup/15-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 12:22:59 | demand-backup/15-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:22:59 | demand-backup/15-delete-data | + local pod= logger.go:42: 12:22:59 | demand-backup/15-delete-data | ++ get_client_pod logger.go:42: 12:22:59 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 
'jsonpath={.items[].metadata.name}' logger.go:42: 12:23:00 | demand-backup/15-delete-data | + client_pod=mysql-client logger.go:42: 12:23:00 | demand-backup/15-delete-data | + wait_pod mysql-client logger.go:42: 12:23:00 | demand-backup/15-delete-data | + local pod=mysql-client logger.go:42: 12:23:00 | demand-backup/15-delete-data | + set +o xtrace logger.go:42: 12:23:00 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 12:23:00 | demand-backup/15-delete-data | + kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:23:00 | demand-backup/15-delete-data | + sed -e 's/mysql: //' logger.go:42: 12:23:00 | demand-backup/15-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:23:01 | demand-backup/15-delete-data | + : logger.go:42: 12:23:01 | demand-backup/15-delete-data | ++ get_cluster_name logger.go:42: 12:23:01 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:23:01 | demand-backup/15-delete-data | + cluster_name=demand-backup logger.go:42: 12:23:01 | demand-backup/15-delete-data | + for i in 0 1 2 logger.go:42: 12:23:01 | demand-backup/15-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:23:01 | demand-backup/15-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:23:01 | demand-backup/15-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:23:01 | demand-backup/15-delete-data | ++ local pod= logger.go:42: 12:23:01 | demand-backup/15-delete-data | +++ get_client_pod logger.go:42: 12:23:01 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:23:02 | demand-backup/15-delete-data | ++ client_pod=mysql-client logger.go:42: 12:23:02 | demand-backup/15-delete-data | ++ wait_pod mysql-client logger.go:42: 12:23:02 | demand-backup/15-delete-data | ++ local pod=mysql-client logger.go:42: 12:23:02 | demand-backup/15-delete-data | ++ set +o xtrace logger.go:42: 12:23:02 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 12:23:02 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:23:02 | demand-backup/15-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:23:02 | demand-backup/15-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
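
The run_mysql helper from e2e-tests/functions is traced often enough here to reconstruct it; this sketch matches the xtrace above, with the trailing no-op inferred from the bare "+ :" / "++ :" lines that follow each empty result under errexit:

    run_mysql() {
        local command="$1"
        local uri="$2"
        local pod
        pod=$(get_client_pod)     # resolves to "mysql-client"
        wait_pod "${pod}"
        kubectl -n "${NAMESPACE}" exec "${pod}" -- \
            bash -c "printf '%s\n' \"${command}\" | mysql -sN ${uri}" \
            | sed -e 's/mysql: //' \
            | grep -v 'Using a password on the command line interface can be insecure.' \
            || :                  # keep errexit happy when the result set is empty
    }
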
logger.go:42: 12:23:03 | demand-backup/15-delete-data | ++ : logger.go:42: 12:23:03 | demand-backup/15-delete-data | + data= logger.go:42: 12:23:03 | demand-backup/15-delete-data | + kubectl create configmap -n kuttl-test-clear-eel 08-delete-data-s3-0 --from-literal=data= logger.go:42: 12:23:03 | demand-backup/15-delete-data | configmap/08-delete-data-s3-0 created logger.go:42: 12:23:03 | demand-backup/15-delete-data | + for i in 0 1 2 logger.go:42: 12:23:03 | demand-backup/15-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:23:03 | demand-backup/15-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:23:03 | demand-backup/15-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:23:03 | demand-backup/15-delete-data | ++ local pod= logger.go:42: 12:23:03 | demand-backup/15-delete-data | +++ get_client_pod logger.go:42: 12:23:03 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:23:04 | demand-backup/15-delete-data | ++ client_pod=mysql-client logger.go:42: 12:23:04 | demand-backup/15-delete-data | ++ wait_pod mysql-client logger.go:42: 12:23:04 | demand-backup/15-delete-data | ++ local pod=mysql-client logger.go:42: 12:23:04 | demand-backup/15-delete-data | ++ set +o xtrace logger.go:42: 12:23:04 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 12:23:04 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:23:04 | demand-backup/15-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
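
Note that the step still creates a ConfigMap when the query returns nothing: an empty data value is exactly what it wants to record after a TRUNCATE, and it lets the kuttl assert distinguish "table is empty on this pod" from "the check never ran":

    data=$(run_mysql "SELECT * FROM myDB.myTable" \
        "-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password")   # -> ""
    kubectl create configmap -n kuttl-test-clear-eel 08-delete-data-s3-1 \
        --from-literal=data="${data}"    # the "data" key ends up as the empty string
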
logger.go:42: 12:23:04 | demand-backup/15-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:23:05 | demand-backup/15-delete-data | ++ : logger.go:42: 12:23:05 | demand-backup/15-delete-data | + data= logger.go:42: 12:23:05 | demand-backup/15-delete-data | + kubectl create configmap -n kuttl-test-clear-eel 08-delete-data-s3-1 --from-literal=data= logger.go:42: 12:23:05 | demand-backup/15-delete-data | configmap/08-delete-data-s3-1 created logger.go:42: 12:23:05 | demand-backup/15-delete-data | + for i in 0 1 2 logger.go:42: 12:23:05 | demand-backup/15-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:23:05 | demand-backup/15-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:23:05 | demand-backup/15-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:23:05 | demand-backup/15-delete-data | ++ local pod= logger.go:42: 12:23:05 | demand-backup/15-delete-data | +++ get_client_pod logger.go:42: 12:23:05 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:23:06 | demand-backup/15-delete-data | ++ client_pod=mysql-client logger.go:42: 12:23:06 | demand-backup/15-delete-data | ++ wait_pod mysql-client logger.go:42: 12:23:06 | demand-backup/15-delete-data | ++ local pod=mysql-client logger.go:42: 12:23:06 | demand-backup/15-delete-data | ++ set +o xtrace logger.go:42: 12:23:06 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 12:23:06 | demand-backup/15-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:23:06 | demand-backup/15-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:23:06 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:23:07 | demand-backup/15-delete-data | ++ : logger.go:42: 12:23:07 | demand-backup/15-delete-data | + data= logger.go:42: 12:23:07 | demand-backup/15-delete-data | + kubectl create configmap -n kuttl-test-clear-eel 08-delete-data-s3-2 --from-literal=data= logger.go:42: 12:23:07 | demand-backup/15-delete-data | configmap/08-delete-data-s3-2 created logger.go:42: 12:23:08 | demand-backup/15-delete-data | test step completed 15-delete-data logger.go:42: 12:23:08 | demand-backup/16-restore-from-s3 | starting test step 16-restore-from-s3 logger.go:42: 12:23:08 | demand-backup/16-restore-from-s3 | PerconaServerMySQLRestore:kuttl-test-clear-eel/demand-backup-restore-s3 created logger.go:42: 12:28:17 | demand-backup/16-restore-from-s3 | test step completed 16-restore-from-s3 logger.go:42: 12:28:17 | demand-backup/17-read-data | starting test step 17-read-data logger.go:42: 12:28:17 | demand-backup/17-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-read-data-s3-${i} --from-literal=data="${data}" done] logger.go:42: 12:28:17 | demand-backup/17-read-data | + source ../../functions logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ realpath ../../.. 
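
Unlike step 12, step 16 is not assembled with yq at run time; the restore above references the step-14 backup by name, so no backupSource block is needed. A minimal sketch of such an object (the manifest is never echoed in the log, so the spec below, including the apiVersion, is an assumption):

    kubectl -n kuttl-test-clear-eel apply -f - <<'EOF'
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLRestore
    metadata:
      name: demand-backup-restore-s3
    spec:
      clusterName: demand-backup
      backupName: demand-backup-s3    # the PerconaServerMySQLBackup from step 14
    EOF
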
logger.go:42: 12:28:17 | demand-backup/17-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:28:17 | demand-backup/17-read-data | ++++ pwd logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/tests/demand-backup logger.go:42: 12:28:17 | demand-backup/17-read-data | ++ test_name=demand-backup logger.go:42: 12:28:17 | demand-backup/17-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/vars.sh logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:28:17 | demand-backup/17-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export GIT_BRANCH=PR-916 logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ GIT_BRANCH=PR-916 logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export VERSION=PR-916-a5bda550 logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ VERSION=PR-916-a5bda550 logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export CERT_MANAGER_VER=1.17.2 logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ CERT_MANAGER_VER=1.17.2 logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export MINIO_VER=5.4.0 logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ MINIO_VER=5.4.0 logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 12:28:17 | demand-backup/17-read-data | ++++ which gdate logger.go:42: 12:28:17 | demand-backup/17-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-916/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:28:17 | demand-backup/17-read-data | ++++ which date logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ date=/usr/bin/date logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ oc get projects logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ : logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ kubectl get nodes logger.go:42: 12:28:17 | demand-backup/17-read-data | +++ grep '^minikube' logger.go:42: 12:28:17 | demand-backup/17-read-data | ++ oc get projects logger.go:42: 12:28:17 | demand-backup/17-read-data | ++ get_cluster_name logger.go:42: 12:28:17 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:28:18 | demand-backup/17-read-data | + cluster_name=demand-backup logger.go:42: 12:28:18 | demand-backup/17-read-data | + for i in 0 1 2 logger.go:42: 12:28:18 | demand-backup/17-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:18 | demand-backup/17-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:28:18 | demand-backup/17-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:18 | demand-backup/17-read-data | ++ local pod= logger.go:42: 12:28:18 | demand-backup/17-read-data | +++ get_client_pod logger.go:42: 12:28:18 | demand-backup/17-read-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:28:18 | demand-backup/17-read-data | ++ client_pod=mysql-client logger.go:42: 12:28:18 | demand-backup/17-read-data | ++ wait_pod 
mysql-client logger.go:42: 12:28:18 | demand-backup/17-read-data | ++ local pod=mysql-client logger.go:42: 12:28:18 | demand-backup/17-read-data | ++ set +o xtrace logger.go:42: 12:28:19 | demand-backup/17-read-data | mysql-clienttrue logger.go:42: 12:28:19 | demand-backup/17-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:28:19 | demand-backup/17-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:28:19 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:19 | demand-backup/17-read-data | + data=100500 logger.go:42: 12:28:19 | demand-backup/17-read-data | + kubectl create configmap -n kuttl-test-clear-eel 06-read-data-s3-0 --from-literal=data=100500 logger.go:42: 12:28:20 | demand-backup/17-read-data | configmap/06-read-data-s3-0 created logger.go:42: 12:28:20 | demand-backup/17-read-data | + for i in 0 1 2 logger.go:42: 12:28:20 | demand-backup/17-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:20 | demand-backup/17-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:28:20 | demand-backup/17-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:20 | demand-backup/17-read-data | ++ local pod= logger.go:42: 12:28:20 | demand-backup/17-read-data | +++ get_client_pod logger.go:42: 12:28:20 | demand-backup/17-read-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:28:20 | demand-backup/17-read-data | ++ client_pod=mysql-client logger.go:42: 12:28:20 | demand-backup/17-read-data | ++ wait_pod mysql-client logger.go:42: 12:28:20 | demand-backup/17-read-data | ++ local pod=mysql-client logger.go:42: 12:28:20 | demand-backup/17-read-data | ++ set +o xtrace logger.go:42: 12:28:21 | demand-backup/17-read-data | mysql-clienttrue logger.go:42: 12:28:21 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:21 | demand-backup/17-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:28:21 | demand-backup/17-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
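
The "mysql-clienttrue" lines are wait_pod's output with xtrace switched off: the pod name and its Ready status printed back to back. A plausible reconstruction of the two helpers (only the jsonpath selector and the set +o xtrace are visible in the trace; the wait loop itself is an assumption):

    get_client_pod() {
        kubectl -n "${NAMESPACE}" get pods --selector=name=mysql-client \
            -o 'jsonpath={.items[].metadata.name}'
    }

    wait_pod() {
        local pod="$1"
        set +o xtrace
        local status
        until status=$(kubectl -n "${NAMESPACE}" get pod "${pod}" \
                -o 'jsonpath={.metadata.name}{.status.containerStatuses[0].ready}') \
            && [[ ${status} == "${pod}true" ]]; do
            sleep 1
        done
        echo "${status}"    # the "mysql-clienttrue" lines in the log
        set -o xtrace
    }
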
logger.go:42: 12:28:21 | demand-backup/17-read-data | + data=100500 logger.go:42: 12:28:21 | demand-backup/17-read-data | + kubectl create configmap -n kuttl-test-clear-eel 06-read-data-s3-1 --from-literal=data=100500 logger.go:42: 12:28:22 | demand-backup/17-read-data | configmap/06-read-data-s3-1 created logger.go:42: 12:28:22 | demand-backup/17-read-data | + for i in 0 1 2 logger.go:42: 12:28:22 | demand-backup/17-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:22 | demand-backup/17-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:28:22 | demand-backup/17-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:22 | demand-backup/17-read-data | ++ local pod= logger.go:42: 12:28:22 | demand-backup/17-read-data | +++ get_client_pod logger.go:42: 12:28:22 | demand-backup/17-read-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:28:22 | demand-backup/17-read-data | ++ client_pod=mysql-client logger.go:42: 12:28:22 | demand-backup/17-read-data | ++ wait_pod mysql-client logger.go:42: 12:28:22 | demand-backup/17-read-data | ++ local pod=mysql-client logger.go:42: 12:28:22 | demand-backup/17-read-data | ++ set +o xtrace logger.go:42: 12:28:22 | demand-backup/17-read-data | mysql-clienttrue logger.go:42: 12:28:22 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:22 | demand-backup/17-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:28:22 | demand-backup/17-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:28:23 | demand-backup/17-read-data | + data=100500 logger.go:42: 12:28:23 | demand-backup/17-read-data | + kubectl create configmap -n kuttl-test-clear-eel 06-read-data-s3-2 --from-literal=data=100500 logger.go:42: 12:28:24 | demand-backup/17-read-data | configmap/06-read-data-s3-2 created logger.go:42: 12:28:24 | demand-backup/17-read-data | test step completed 17-read-data logger.go:42: 12:28:24 | demand-backup/18-create-backup-gcp | starting test step 18-create-backup-gcp logger.go:42: 12:28:25 | demand-backup/18-create-backup-gcp | PerconaServerMySQLBackup:kuttl-test-clear-eel/demand-backup-gcp created logger.go:42: 12:28:35 | demand-backup/18-create-backup-gcp | test step completed 18-create-backup-gcp logger.go:42: 12:28:35 | demand-backup/19-delete-data | starting test step 19-delete-data logger.go:42: 12:28:35 | demand-backup/19-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 12-delete-data-gcp-${i} --from-literal=data="${data}" done] logger.go:42: 12:28:35 | demand-backup/19-delete-data | + source ../../functions logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ realpath ../../.. 
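
On-demand backups are custom resources too; kuttl creates step 18's object from a file rather than a pipeline, so its manifest never appears in the trace. A hypothetical sketch (the apiVersion and the storageName value are assumptions; the storage name must match an entry in the cluster's backup storages):

    kubectl -n kuttl-test-clear-eel apply -f - <<'EOF'
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLBackup
    metadata:
      name: demand-backup-gcp
    spec:
      clusterName: demand-backup
      storageName: gcp-cs    # hypothetical; the real test takes this from the cluster spec
    EOF
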
logger.go:42: 12:28:35 | demand-backup/19-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:28:35 | demand-backup/19-delete-data | ++++ pwd logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/tests/demand-backup logger.go:42: 12:28:35 | demand-backup/19-delete-data | ++ test_name=demand-backup logger.go:42: 12:28:35 | demand-backup/19-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/vars.sh logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-916 logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/deploy logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-916/e2e-tests/conf logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:28:35 | demand-backup/19-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export GIT_BRANCH=PR-916 logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ GIT_BRANCH=PR-916 logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export VERSION=PR-916-a5bda550 logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ VERSION=PR-916-a5bda550 logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-916-a5bda550 logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 
12:28:35 | demand-backup/19-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export CERT_MANAGER_VER=1.17.2 logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ CERT_MANAGER_VER=1.17.2 logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export MINIO_VER=5.4.0 logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ MINIO_VER=5.4.0 logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 12:28:35 | demand-backup/19-delete-data | ++++ which gdate logger.go:42: 12:28:35 | demand-backup/19-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-916/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:28:35 | demand-backup/19-delete-data | ++++ which date logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ date=/usr/bin/date logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ oc get projects logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ : logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ kubectl get nodes logger.go:42: 12:28:35 | demand-backup/19-delete-data | +++ grep '^minikube' logger.go:42: 12:28:36 | demand-backup/19-delete-data | ++ oc get projects logger.go:42: 12:28:36 | demand-backup/19-delete-data | +++ get_cluster_name logger.go:42: 12:28:36 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:28:36 | demand-backup/19-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 12:28:36 | demand-backup/19-delete-data | ++ local cluster=demand-backup logger.go:42: 12:28:36 | demand-backup/19-delete-data | ++ echo demand-backup-haproxy logger.go:42: 12:28:36 | demand-backup/19-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:28:36 | demand-backup/19-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 12:28:36 | demand-backup/19-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:28:36 | demand-backup/19-delete-data | + local pod= logger.go:42: 12:28:36 | demand-backup/19-delete-data | ++ get_client_pod logger.go:42: 12:28:36 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 
'jsonpath={.items[].metadata.name}' logger.go:42: 12:28:37 | demand-backup/19-delete-data | + client_pod=mysql-client logger.go:42: 12:28:37 | demand-backup/19-delete-data | + wait_pod mysql-client logger.go:42: 12:28:37 | demand-backup/19-delete-data | + local pod=mysql-client logger.go:42: 12:28:37 | demand-backup/19-delete-data | + set +o xtrace logger.go:42: 12:28:37 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 12:28:37 | demand-backup/19-delete-data | + kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:28:37 | demand-backup/19-delete-data | + sed -e 's/mysql: //' logger.go:42: 12:28:37 | demand-backup/19-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:28:38 | demand-backup/19-delete-data | + : logger.go:42: 12:28:38 | demand-backup/19-delete-data | ++ get_cluster_name logger.go:42: 12:28:38 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:28:38 | demand-backup/19-delete-data | + cluster_name=demand-backup logger.go:42: 12:28:38 | demand-backup/19-delete-data | + for i in 0 1 2 logger.go:42: 12:28:38 | demand-backup/19-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:38 | demand-backup/19-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:28:38 | demand-backup/19-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:38 | demand-backup/19-delete-data | ++ local pod= logger.go:42: 12:28:38 | demand-backup/19-delete-data | +++ get_client_pod logger.go:42: 12:28:38 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:28:39 | demand-backup/19-delete-data | ++ client_pod=mysql-client logger.go:42: 12:28:39 | demand-backup/19-delete-data | ++ wait_pod mysql-client logger.go:42: 12:28:39 | demand-backup/19-delete-data | ++ local pod=mysql-client logger.go:42: 12:28:39 | demand-backup/19-delete-data | ++ set +o xtrace logger.go:42: 12:28:39 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 12:28:39 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:39 | demand-backup/19-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:28:39 | demand-backup/19-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
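
get_haproxy_svc is the simplest helper in the file and is traced in full at 12:28:36; routing the TRUNCATE through the HAProxy service means the write lands on the current primary and replicates down, after which the loop that follows checks that the rows are gone from every pod:

    get_haproxy_svc() {
        local cluster="$1"
        echo "${cluster}-haproxy"    # demand-backup -> demand-backup-haproxy
    }
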
logger.go:42: 12:28:40 | demand-backup/19-delete-data | ++ : logger.go:42: 12:28:40 | demand-backup/19-delete-data | + data= logger.go:42: 12:28:40 | demand-backup/19-delete-data | + kubectl create configmap -n kuttl-test-clear-eel 12-delete-data-gcp-0 --from-literal=data= logger.go:42: 12:28:40 | demand-backup/19-delete-data | configmap/12-delete-data-gcp-0 created logger.go:42: 12:28:40 | demand-backup/19-delete-data | + for i in 0 1 2 logger.go:42: 12:28:40 | demand-backup/19-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:40 | demand-backup/19-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:28:40 | demand-backup/19-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:40 | demand-backup/19-delete-data | ++ local pod= logger.go:42: 12:28:40 | demand-backup/19-delete-data | +++ get_client_pod logger.go:42: 12:28:40 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:28:41 | demand-backup/19-delete-data | ++ client_pod=mysql-client logger.go:42: 12:28:41 | demand-backup/19-delete-data | ++ wait_pod mysql-client logger.go:42: 12:28:41 | demand-backup/19-delete-data | ++ local pod=mysql-client logger.go:42: 12:28:41 | demand-backup/19-delete-data | ++ set +o xtrace logger.go:42: 12:28:41 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 12:28:41 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:41 | demand-backup/19-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:28:41 | demand-backup/19-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
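
In some iterations (12:23:04, 12:28:19, 12:28:43) the sed and grep stages are traced before the kubectl exec that feeds them. That is not a logic change: bash traces each stage of a pipeline as its process is forked, so the ordering is scheduling noise. A self-contained demonstration:

    set -o xtrace
    # The three stages below may be traced in any order from run to run.
    printf '%s\n' 100500 | sed -e 's/mysql: //' | grep -v 'insecure'
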
logger.go:42: 12:28:42 | demand-backup/19-delete-data | ++ : logger.go:42: 12:28:42 | demand-backup/19-delete-data | + data= logger.go:42: 12:28:42 | demand-backup/19-delete-data | + kubectl create configmap -n kuttl-test-clear-eel 12-delete-data-gcp-1 --from-literal=data= logger.go:42: 12:28:42 | demand-backup/19-delete-data | configmap/12-delete-data-gcp-1 created logger.go:42: 12:28:42 | demand-backup/19-delete-data | + for i in 0 1 2 logger.go:42: 12:28:42 | demand-backup/19-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:42 | demand-backup/19-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:28:42 | demand-backup/19-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:42 | demand-backup/19-delete-data | ++ local pod= logger.go:42: 12:28:42 | demand-backup/19-delete-data | +++ get_client_pod logger.go:42: 12:28:42 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:28:43 | demand-backup/19-delete-data | ++ client_pod=mysql-client logger.go:42: 12:28:43 | demand-backup/19-delete-data | ++ wait_pod mysql-client logger.go:42: 12:28:43 | demand-backup/19-delete-data | ++ local pod=mysql-client logger.go:42: 12:28:43 | demand-backup/19-delete-data | ++ set +o xtrace logger.go:42: 12:28:43 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 12:28:43 | demand-backup/19-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:28:43 | demand-backup/19-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:28:43 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:28:44 | demand-backup/19-delete-data | ++ : logger.go:42: 12:28:44 | demand-backup/19-delete-data | + data= logger.go:42: 12:28:44 | demand-backup/19-delete-data | + kubectl create configmap -n kuttl-test-clear-eel 12-delete-data-gcp-2 --from-literal=data= logger.go:42: 12:28:44 | demand-backup/19-delete-data | configmap/12-delete-data-gcp-2 created logger.go:42: 12:28:45 | demand-backup/19-delete-data | test step completed 19-delete-data logger.go:42: 12:28:45 | demand-backup/20-restore-from-gcp | starting test step 20-restore-from-gcp logger.go:42: 12:28:46 | demand-backup/20-restore-from-gcp | PerconaServerMySQLRestore:kuttl-test-clear-eel/demand-backup-restore-gcp created logger.go:42: 12:34:07 | demand-backup/20-restore-from-gcp | test step completed 20-restore-from-gcp logger.go:42: 12:34:07 | demand-backup/21-read-data | starting test step 21-read-data logger.go:42: 12:34:07 | demand-backup/21-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-read-data-gcp-${i} --from-literal=data="${data}" done] logger.go:42: 12:34:07 | demand-backup/21-read-data | + source ../../functions logger.go:42: 12:34:07 | demand-backup/21-read-data | +++ realpath ../../.. 
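Step 20 above is a single manifest: kuttl creates a PerconaServerMySQLRestore object and then polls it until the operator finishes, about five and a half minutes in this run. A sketch of such a manifest; the spec fields follow the operator's example restore resource and are worth double-checking against the deploy/ directory of this branch:

    kubectl -n kuttl-test-clear-eel apply -f - <<EOF
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLRestore
    metadata:
      name: demand-backup-restore-gcp
    spec:
      clusterName: demand-backup
      backupName: demand-backup-gcp
    EOF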
logger.go:42: 12:34:08 | demand-backup/21-read-data | ++ get_cluster_name logger.go:42: 12:34:08 | demand-backup/21-read-data | ++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:34:08 | demand-backup/21-read-data | + cluster_name=demand-backup logger.go:42: 12:34:08 | demand-backup/21-read-data | + for i in 0 1 2 logger.go:42: 12:34:08 | demand-backup/21-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:08 | demand-backup/21-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:34:08 | demand-backup/21-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:08 | demand-backup/21-read-data | ++ local pod= logger.go:42: 12:34:08 | demand-backup/21-read-data | +++ get_client_pod logger.go:42: 12:34:08 | demand-backup/21-read-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:34:09 | demand-backup/21-read-data | ++ client_pod=mysql-client logger.go:42: 12:34:09 | demand-backup/21-read-data | ++ wait_pod
mysql-client logger.go:42: 12:34:09 | demand-backup/21-read-data | ++ local pod=mysql-client logger.go:42: 12:34:09 | demand-backup/21-read-data | ++ set +o xtrace logger.go:42: 12:34:09 | demand-backup/21-read-data | mysql-clienttrue logger.go:42: 12:34:09 | demand-backup/21-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:34:09 | demand-backup/21-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:34:09 | demand-backup/21-read-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:10 | demand-backup/21-read-data | + data=100500 logger.go:42: 12:34:10 | demand-backup/21-read-data | + kubectl create configmap -n kuttl-test-clear-eel 06-read-data-gcp-0 --from-literal=data=100500 logger.go:42: 12:34:10 | demand-backup/21-read-data | configmap/06-read-data-gcp-0 created logger.go:42: 12:34:10 | demand-backup/21-read-data | + for i in 0 1 2 logger.go:42: 12:34:10 | demand-backup/21-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:10 | demand-backup/21-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:34:10 | demand-backup/21-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:10 | demand-backup/21-read-data | ++ local pod= logger.go:42: 12:34:10 | demand-backup/21-read-data | +++ get_client_pod logger.go:42: 12:34:10 | demand-backup/21-read-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:34:11 | demand-backup/21-read-data | ++ client_pod=mysql-client logger.go:42: 12:34:11 | demand-backup/21-read-data | ++ wait_pod mysql-client logger.go:42: 12:34:11 | demand-backup/21-read-data | ++ local pod=mysql-client logger.go:42: 12:34:11 | demand-backup/21-read-data | ++ set +o xtrace logger.go:42: 12:34:11 | demand-backup/21-read-data | mysql-clienttrue logger.go:42: 12:34:11 | demand-backup/21-read-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:11 | demand-backup/21-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:34:11 | demand-backup/21-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
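The 06-read-data-gcp-* ConfigMaps being written in this step exist only so kuttl's assert files can match on their contents; the restore counts as good when every replica serves the original row again. A hand-rolled equivalent of that assertion (hypothetical; the actual assert YAML is not part of this log):

    for i in 0 1 2; do
        got=$(kubectl -n kuttl-test-clear-eel get configmap "06-read-data-gcp-${i}" \
            -o 'jsonpath={.data.data}')
        # 100500 is the single row inserted at the start of the test
        [[ $got == "100500" ]] || { echo "replica $i: unexpected data '$got'"; exit 1; }
    done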
logger.go:42: 12:34:12 | demand-backup/21-read-data | + data=100500 logger.go:42: 12:34:12 | demand-backup/21-read-data | + kubectl create configmap -n kuttl-test-clear-eel 06-read-data-gcp-1 --from-literal=data=100500 logger.go:42: 12:34:12 | demand-backup/21-read-data | configmap/06-read-data-gcp-1 created logger.go:42: 12:34:12 | demand-backup/21-read-data | + for i in 0 1 2 logger.go:42: 12:34:12 | demand-backup/21-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:12 | demand-backup/21-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:34:12 | demand-backup/21-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:12 | demand-backup/21-read-data | ++ local pod= logger.go:42: 12:34:12 | demand-backup/21-read-data | +++ get_client_pod logger.go:42: 12:34:12 | demand-backup/21-read-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:34:13 | demand-backup/21-read-data | ++ client_pod=mysql-client logger.go:42: 12:34:13 | demand-backup/21-read-data | ++ wait_pod mysql-client logger.go:42: 12:34:13 | demand-backup/21-read-data | ++ local pod=mysql-client logger.go:42: 12:34:13 | demand-backup/21-read-data | ++ set +o xtrace logger.go:42: 12:34:13 | demand-backup/21-read-data | mysql-clienttrue logger.go:42: 12:34:13 | demand-backup/21-read-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:13 | demand-backup/21-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:34:13 | demand-backup/21-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:34:14 | demand-backup/21-read-data | + data=100500 logger.go:42: 12:34:14 | demand-backup/21-read-data | + kubectl create configmap -n kuttl-test-clear-eel 06-read-data-gcp-2 --from-literal=data=100500 logger.go:42: 12:34:14 | demand-backup/21-read-data | configmap/06-read-data-gcp-2 created logger.go:42: 12:34:15 | demand-backup/21-read-data | test step completed 21-read-data logger.go:42: 12:34:15 | demand-backup/22-create-backup-azure | starting test step 22-create-backup-azure logger.go:42: 12:34:15 | demand-backup/22-create-backup-azure | PerconaServerMySQLBackup:kuttl-test-clear-eel/demand-backup-azure created logger.go:42: 12:34:26 | demand-backup/22-create-backup-azure | test step completed 22-create-backup-azure logger.go:42: 12:34:26 | demand-backup/23-delete-data | starting test step 23-delete-data logger.go:42: 12:34:26 | demand-backup/23-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 16-delete-data-azure-${i} --from-literal=data="${data}" done] logger.go:42: 12:34:26 | demand-backup/23-delete-data | + source ../../functions logger.go:42: 12:34:26 | demand-backup/23-delete-data | +++ realpath ../../.. 
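Step 22 above mirrors the earlier backup steps: it creates a PerconaServerMySQLBackup pointing at the cluster's Azure storage and waits for it to succeed (eleven seconds here, the dataset being a single row). A sketch of the manifest; clusterName and storageName are the fields the operator's example backup uses, and azure-blob is an assumed storage name that must match an entry in the PerconaServerMySQL spec:

    kubectl -n kuttl-test-clear-eel apply -f - <<EOF
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLBackup
    metadata:
      name: demand-backup-azure
    spec:
      clusterName: demand-backup
      storageName: azure-blob
    EOF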
logger.go:42: 12:34:27 | demand-backup/23-delete-data | +++ get_cluster_name logger.go:42: 12:34:27 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:34:27 | demand-backup/23-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 12:34:27 | demand-backup/23-delete-data | ++ local cluster=demand-backup logger.go:42: 12:34:27 | demand-backup/23-delete-data | ++ echo demand-backup-haproxy logger.go:42: 12:34:27 | demand-backup/23-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:34:27 | demand-backup/23-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 12:34:27 | demand-backup/23-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:34:27 | demand-backup/23-delete-data | + local pod= logger.go:42: 12:34:27 | demand-backup/23-delete-data | ++ get_client_pod logger.go:42: 12:34:27 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o
'jsonpath={.items[].metadata.name}' logger.go:42: 12:34:28 | demand-backup/23-delete-data | + client_pod=mysql-client logger.go:42: 12:34:28 | demand-backup/23-delete-data | + wait_pod mysql-client logger.go:42: 12:34:28 | demand-backup/23-delete-data | + local pod=mysql-client logger.go:42: 12:34:28 | demand-backup/23-delete-data | + set +o xtrace logger.go:42: 12:34:28 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 12:34:28 | demand-backup/23-delete-data | + sed -e 's/mysql: //' logger.go:42: 12:34:28 | demand-backup/23-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:34:28 | demand-backup/23-delete-data | + kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:34:29 | demand-backup/23-delete-data | + : logger.go:42: 12:34:29 | demand-backup/23-delete-data | ++ get_cluster_name logger.go:42: 12:34:29 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:34:29 | demand-backup/23-delete-data | + cluster_name=demand-backup logger.go:42: 12:34:29 | demand-backup/23-delete-data | + for i in 0 1 2 logger.go:42: 12:34:29 | demand-backup/23-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:29 | demand-backup/23-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:34:29 | demand-backup/23-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:29 | demand-backup/23-delete-data | ++ local pod= logger.go:42: 12:34:29 | demand-backup/23-delete-data | +++ get_client_pod logger.go:42: 12:34:29 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:34:30 | demand-backup/23-delete-data | ++ client_pod=mysql-client logger.go:42: 12:34:30 | demand-backup/23-delete-data | ++ wait_pod mysql-client logger.go:42: 12:34:30 | demand-backup/23-delete-data | ++ local pod=mysql-client logger.go:42: 12:34:30 | demand-backup/23-delete-data | ++ set +o xtrace logger.go:42: 12:34:30 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 12:34:30 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:30 | demand-backup/23-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:34:30 | demand-backup/23-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 12:34:31 | demand-backup/23-delete-data | ++ : logger.go:42: 12:34:31 | demand-backup/23-delete-data | + data= logger.go:42: 12:34:31 | demand-backup/23-delete-data | + kubectl create configmap -n kuttl-test-clear-eel 16-delete-data-azure-0 --from-literal=data= logger.go:42: 12:34:31 | demand-backup/23-delete-data | configmap/16-delete-data-azure-0 created logger.go:42: 12:34:31 | demand-backup/23-delete-data | + for i in 0 1 2 logger.go:42: 12:34:31 | demand-backup/23-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:31 | demand-backup/23-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:34:31 | demand-backup/23-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:31 | demand-backup/23-delete-data | ++ local pod= logger.go:42: 12:34:31 | demand-backup/23-delete-data | +++ get_client_pod logger.go:42: 12:34:31 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:34:32 | demand-backup/23-delete-data | ++ client_pod=mysql-client logger.go:42: 12:34:32 | demand-backup/23-delete-data | ++ wait_pod mysql-client logger.go:42: 12:34:32 | demand-backup/23-delete-data | ++ local pod=mysql-client logger.go:42: 12:34:32 | demand-backup/23-delete-data | ++ set +o xtrace logger.go:42: 12:34:32 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 12:34:32 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:32 | demand-backup/23-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:34:32 | demand-backup/23-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 12:34:33 | demand-backup/23-delete-data | ++ : logger.go:42: 12:34:33 | demand-backup/23-delete-data | + data= logger.go:42: 12:34:33 | demand-backup/23-delete-data | + kubectl create configmap -n kuttl-test-clear-eel 16-delete-data-azure-1 --from-literal=data= logger.go:42: 12:34:33 | demand-backup/23-delete-data | configmap/16-delete-data-azure-1 created logger.go:42: 12:34:33 | demand-backup/23-delete-data | + for i in 0 1 2 logger.go:42: 12:34:33 | demand-backup/23-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:33 | demand-backup/23-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:34:33 | demand-backup/23-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:33 | demand-backup/23-delete-data | ++ local pod= logger.go:42: 12:34:33 | demand-backup/23-delete-data | +++ get_client_pod logger.go:42: 12:34:33 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:34:34 | demand-backup/23-delete-data | ++ client_pod=mysql-client logger.go:42: 12:34:34 | demand-backup/23-delete-data | ++ wait_pod mysql-client logger.go:42: 12:34:34 | demand-backup/23-delete-data | ++ local pod=mysql-client logger.go:42: 12:34:34 | demand-backup/23-delete-data | ++ set +o xtrace logger.go:42: 12:34:34 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 12:34:34 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:34:34 | demand-backup/23-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:34:34 | demand-backup/23-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:34:35 | demand-backup/23-delete-data | ++ : logger.go:42: 12:34:35 | demand-backup/23-delete-data | + data= logger.go:42: 12:34:35 | demand-backup/23-delete-data | + kubectl create configmap -n kuttl-test-clear-eel 16-delete-data-azure-2 --from-literal=data= logger.go:42: 12:34:35 | demand-backup/23-delete-data | configmap/16-delete-data-azure-2 created logger.go:42: 12:34:36 | demand-backup/23-delete-data | test step completed 23-delete-data logger.go:42: 12:34:36 | demand-backup/24-restore-from-azure | starting test step 24-restore-from-azure logger.go:42: 12:34:36 | demand-backup/24-restore-from-azure | PerconaServerMySQLRestore:kuttl-test-clear-eel/demand-backup-restore-azure created logger.go:42: 12:39:44 | demand-backup/24-restore-from-azure | test step completed 24-restore-from-azure logger.go:42: 12:39:44 | demand-backup/25-read-data | starting test step 25-read-data logger.go:42: 12:39:44 | demand-backup/25-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-read-data-azure-${i} --from-literal=data="${data}" done] logger.go:42: 12:39:44 | demand-backup/25-read-data | + source ../../functions logger.go:42: 12:39:44 | demand-backup/25-read-data | +++ realpath ../../.. 
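The long quiet stretch in step 24 between 'created' at 12:34:36 and 'completed' at 12:39:44 is kuttl polling the restore object's status. Outside kuttl the same wait can be scripted; a sketch, assuming the ps-restore short name is registered the way ps-backup is used in step 26, and that a finished restore reports .status.state: Succeeded (verify both against this branch's CRDs):

    # poll until the restore reaches its terminal state
    until [[ $(kubectl -n kuttl-test-clear-eel get ps-restore demand-backup-restore-azure \
            -o 'jsonpath={.status.state}') == "Succeeded" ]]; do
        sleep 5   # the azure restore above took roughly five minutes
    done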
logger.go:42: 12:39:45 | demand-backup/25-read-data | ++ get_cluster_name logger.go:42: 12:39:45 | demand-backup/25-read-data | ++ kubectl -n kuttl-test-clear-eel get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:39:45 | demand-backup/25-read-data | + cluster_name=demand-backup logger.go:42: 12:39:45 | demand-backup/25-read-data | + for i in 0 1 2 logger.go:42: 12:39:45 | demand-backup/25-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:39:45 | demand-backup/25-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:39:45 | demand-backup/25-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:39:45 | demand-backup/25-read-data | ++ local pod= logger.go:42: 12:39:45 | demand-backup/25-read-data | +++ get_client_pod logger.go:42: 12:39:45 | demand-backup/25-read-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:39:45 | demand-backup/25-read-data | ++ client_pod=mysql-client logger.go:42: 12:39:45 | demand-backup/25-read-data | ++ wait_pod
mysql-client logger.go:42: 12:39:45 | demand-backup/25-read-data | ++ local pod=mysql-client logger.go:42: 12:39:45 | demand-backup/25-read-data | ++ set +o xtrace logger.go:42: 12:39:46 | demand-backup/25-read-data | mysql-clienttrue logger.go:42: 12:39:46 | demand-backup/25-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:39:46 | demand-backup/25-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:39:46 | demand-backup/25-read-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:39:47 | demand-backup/25-read-data | + data=100500 logger.go:42: 12:39:47 | demand-backup/25-read-data | + kubectl create configmap -n kuttl-test-clear-eel 06-read-data-azure-0 --from-literal=data=100500 logger.go:42: 12:39:47 | demand-backup/25-read-data | configmap/06-read-data-azure-0 created logger.go:42: 12:39:47 | demand-backup/25-read-data | + for i in 0 1 2 logger.go:42: 12:39:47 | demand-backup/25-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:39:47 | demand-backup/25-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:39:47 | demand-backup/25-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:39:47 | demand-backup/25-read-data | ++ local pod= logger.go:42: 12:39:47 | demand-backup/25-read-data | +++ get_client_pod logger.go:42: 12:39:47 | demand-backup/25-read-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:39:47 | demand-backup/25-read-data | ++ client_pod=mysql-client logger.go:42: 12:39:47 | demand-backup/25-read-data | ++ wait_pod mysql-client logger.go:42: 12:39:47 | demand-backup/25-read-data | ++ local pod=mysql-client logger.go:42: 12:39:47 | demand-backup/25-read-data | ++ set +o xtrace logger.go:42: 12:39:48 | demand-backup/25-read-data | mysql-clienttrue logger.go:42: 12:39:48 | demand-backup/25-read-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:39:48 | demand-backup/25-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:39:48 | demand-backup/25-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 12:39:48 | demand-backup/25-read-data | + data=100500 logger.go:42: 12:39:48 | demand-backup/25-read-data | + kubectl create configmap -n kuttl-test-clear-eel 06-read-data-azure-1 --from-literal=data=100500 logger.go:42: 12:39:49 | demand-backup/25-read-data | configmap/06-read-data-azure-1 created logger.go:42: 12:39:49 | demand-backup/25-read-data | + for i in 0 1 2 logger.go:42: 12:39:49 | demand-backup/25-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:39:49 | demand-backup/25-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:39:49 | demand-backup/25-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:39:49 | demand-backup/25-read-data | ++ local pod= logger.go:42: 12:39:49 | demand-backup/25-read-data | +++ get_client_pod logger.go:42: 12:39:49 | demand-backup/25-read-data | +++ kubectl -n kuttl-test-clear-eel get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:39:49 | demand-backup/25-read-data | ++ client_pod=mysql-client logger.go:42: 12:39:49 | demand-backup/25-read-data | ++ wait_pod mysql-client logger.go:42: 12:39:49 | demand-backup/25-read-data | ++ local pod=mysql-client logger.go:42: 12:39:49 | demand-backup/25-read-data | ++ set +o xtrace logger.go:42: 12:39:50 | demand-backup/25-read-data | mysql-clienttrue logger.go:42: 12:39:50 | demand-backup/25-read-data | ++ kubectl -n kuttl-test-clear-eel exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:39:50 | demand-backup/25-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:39:50 | demand-backup/25-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:39:50 | demand-backup/25-read-data | + data=100500 logger.go:42: 12:39:50 | demand-backup/25-read-data | + kubectl create configmap -n kuttl-test-clear-eel 06-read-data-azure-2 --from-literal=data=100500 logger.go:42: 12:39:51 | demand-backup/25-read-data | configmap/06-read-data-azure-2 created logger.go:42: 12:39:52 | demand-backup/25-read-data | test step completed 25-read-data logger.go:42: 12:39:52 | demand-backup/26-delete-all-backups | starting test step 26-delete-all-backups logger.go:42: 12:39:52 | demand-backup/26-delete-all-backups | running command: [sh -c set -o errexit set -o xtrace source ../../functions kubectl delete ps-backup --all -n "${NAMESPACE}" backup_name_minio="demand-backup-minio" accessKey="$(kubectl -n "${NAMESPACE}" get secret minio-secret -o jsonpath='{.data.AWS_ACCESS_KEY_ID}' | base64 -d)" secretKey="$(kubectl -n "${NAMESPACE}" get secret minio-secret -o jsonpath='{.data.AWS_SECRET_ACCESS_KEY}' | base64 -d)" backup_exists=$( kubectl run -n "${NAMESPACE}" -i --rm aws-cli --image=perconalab/awscli --restart=Never -- \ /usr/bin/env AWS_ACCESS_KEY_ID="${accessKey}" AWS_SECRET_ACCESS_KEY="${secretKey}" AWS_DEFAULT_REGION=us-east-1 \ /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls 'operator-testing/' | grep -c "${backup_name_minio}/" | cat exit "${PIPESTATUS[0]}" ) if [[ 1 -eq $backup_exists ]]; then echo "Backup was not removed from bucket -- minio" exit 1 fi] logger.go:42: 12:39:52 | demand-backup/26-delete-all-backups | + source ../../functions logger.go:42: 12:39:52 | demand-backup/26-delete-all-backups | +++ realpath ../../.. 
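One subtlety in the step 26 script above: grep -c exits non-zero when it counts zero matches, and zero matches is exactly the wanted outcome, so under set -o errexit the pipeline needs defusing. Appending | cat makes the pipeline's overall status 0, and exit "${PIPESTATUS[0]}" then surfaces the first command's own exit code from the subshell instead of grep's. Distilled, with producer_cmd standing in for the kubectl run ... aws s3 ls invocation:

    count=$(
        producer_cmd | grep -c 'needle' | cat   # cat keeps errexit calm when grep matches nothing
        exit "${PIPESTATUS[0]}"                 # propagate producer_cmd's status, not grep's
    )
    [[ $count -eq 0 ]] || { echo 'backup still present in bucket'; exit 1; }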
logger.go:42: 12:39:52 | demand-backup/26-delete-all-backups | + kubectl delete ps-backup --all -n kuttl-test-clear-eel logger.go:42: 12:39:53 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-azure" deleted logger.go:42: 12:39:53 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-gcp" deleted logger.go:42: 12:39:53 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-minio" deleted logger.go:42: 12:39:53 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-s3" deleted logger.go:42: 12:39:56 | demand-backup/26-delete-all-backups | +
backup_name_minio=demand-backup-minio logger.go:42: 12:39:56 | demand-backup/26-delete-all-backups | ++ kubectl -n kuttl-test-clear-eel get secret minio-secret -o 'jsonpath={.data.AWS_ACCESS_KEY_ID}' logger.go:42: 12:39:56 | demand-backup/26-delete-all-backups | ++ base64 -d logger.go:42: 12:39:57 | demand-backup/26-delete-all-backups | + accessKey=some-access-key logger.go:42: 12:39:57 | demand-backup/26-delete-all-backups | ++ kubectl -n kuttl-test-clear-eel get secret minio-secret -o 'jsonpath={.data.AWS_SECRET_ACCESS_KEY}' logger.go:42: 12:39:57 | demand-backup/26-delete-all-backups | ++ base64 -d logger.go:42: 12:39:57 | demand-backup/26-delete-all-backups | + secretKey=some-secret-key logger.go:42: 12:39:57 | demand-backup/26-delete-all-backups | ++ kubectl run -n kuttl-test-clear-eel -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls operator-testing/ logger.go:42: 12:39:57 | demand-backup/26-delete-all-backups | ++ grep -c demand-backup-minio/ logger.go:42: 12:39:57 | demand-backup/26-delete-all-backups | ++ cat logger.go:42: 12:39:58 | demand-backup/26-delete-all-backups | If you don't see a command prompt, try pressing enter. logger.go:42: 12:40:00 | demand-backup/26-delete-all-backups | ++ exit 0 logger.go:42: 12:40:00 | demand-backup/26-delete-all-backups | + backup_exists=0 logger.go:42: 12:40:00 | demand-backup/26-delete-all-backups | + [[ 1 -eq 0 ]] logger.go:42: 12:40:00 | demand-backup/26-delete-all-backups | test step completed 26-delete-all-backups logger.go:42: 12:40:00 | demand-backup/98-drop-finalizer | starting test step 98-drop-finalizer logger.go:42: 12:40:01 | demand-backup/98-drop-finalizer | PerconaServerMySQL:kuttl-test-clear-eel/demand-backup updated logger.go:42: 12:40:01 | demand-backup/98-drop-finalizer | test step completed 98-drop-finalizer logger.go:42: 12:40:01 | demand-backup/99-remove-cluster-gracefully | starting test step 99-remove-cluster-gracefully logger.go:42: 12:40:01 | demand-backup/99-remove-cluster-gracefully | running command: [sh -c set -o errexit set -o xtrace source ../../functions destroy_operator] logger.go:42: 12:40:01 | demand-backup/99-remove-cluster-gracefully | + source ../../functions logger.go:42: 12:40:01 | demand-backup/99-remove-cluster-gracefully | +++ realpath ../../.. 
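Step 98 above, whose entire output is the single 'updated' line, strips the finalizers from the custom resource so that the forced teardown in step 99 cannot hang waiting on them. The usual shape of that patch (a sketch; the actual kuttl step file is not shown in this log):

    # clear all finalizers on the PerconaServerMySQL object so deletion can proceed
    kubectl -n kuttl-test-clear-eel patch ps demand-backup \
        --type=merge -p '{"metadata":{"finalizers":[]}}'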
logger.go:42: 12:40:02 | demand-backup/99-remove-cluster-gracefully | + destroy_operator logger.go:42: 12:40:02 | demand-backup/99-remove-cluster-gracefully | + kubectl -n ps-operator delete deployment percona-server-mysql-operator --force --grace-period=0 logger.go:42: 12:40:02 | demand-backup/99-remove-cluster-gracefully | Warning: Immediate
logger.go:42: 12:40:02 | demand-backup/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
logger.go:42: 12:40:02 | demand-backup/99-remove-cluster-gracefully | deployment.apps "percona-server-mysql-operator" force deleted
logger.go:42: 12:40:03 | demand-backup/99-remove-cluster-gracefully | + [[ -n ps-operator ]]
logger.go:42: 12:40:03 | demand-backup/99-remove-cluster-gracefully | + kubectl delete namespace ps-operator --force --grace-period=0
logger.go:42: 12:40:03 | demand-backup/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
logger.go:42: 12:40:03 | demand-backup/99-remove-cluster-gracefully | namespace "ps-operator" force deleted
logger.go:42: 12:40:09 | demand-backup/99-remove-cluster-gracefully | test step completed 99-remove-cluster-gracefully
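The destroy_operator teardown traced above reduces to two forced deletions (resource names from this run). The two kubectl warnings are expected: --force --grace-period=0 removes the objects from the API immediately, without waiting for the underlying pods to terminate. A minimal sketch, assuming the operator was deployed cluster-wide into the ps-operator namespace as here:

    # Drop the operator Deployment first, then its namespace.
    kubectl -n ps-operator delete deployment percona-server-mysql-operator \
        --force --grace-period=0
    kubectl delete namespace ps-operator --force --grace-period=0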
logger.go:42: 12:40:09 | demand-backup | demand-backup events from ns kuttl-test-clear-eel:
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:34 +0000 UTC Normal Pod mysql-client Binding Scheduled Successfully assigned kuttl-test-clear-eel/mysql-client to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:34 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Pulled Container image "percona/percona-server:8.0.33" already present on machine kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:34 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Created Created container: mysql-client kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:34 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Started Started container mysql-client kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:39 +0000 UTC Normal PersistentVolumeClaim minio-service WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:40 +0000 UTC Normal ReplicaSet.apps minio-service-86dfccd949 SuccessfulCreate Created pod: minio-service-86dfccd949-b46pp replicaset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:40 +0000 UTC Normal Pod minio-service-post-job-249vw Binding Scheduled Successfully assigned kuttl-test-clear-eel/minio-service-post-job-249vw to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:40 +0000 UTC Normal Job.batch minio-service-post-job SuccessfulCreate Created pod: minio-service-post-job-249vw job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:40 +0000 UTC Normal Deployment.apps minio-service ScalingReplicaSet Scaled up replica set minio-service-86dfccd949 to 1 deployment-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:40 +0000 UTC Normal PersistentVolumeClaim minio-service ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:40 +0000 UTC Normal PersistentVolumeClaim minio-service Provisioning External provisioner is provisioning volume for claim "kuttl-test-clear-eel/minio-service" pd.csi.storage.gke.io_gke-b37f1d5431e84790a8b2-42bf-6fdc-vm_cc49398e-26a4-48d2-b20c-893734f24d25
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:41 +0000 UTC Normal Pod minio-service-post-job-249vw.spec.containers{minio-make-user} Pulled Container image "quay.io/minio/mc:RELEASE.2024-11-21T17-21-54Z" already present on machine kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:41 +0000 UTC Normal Pod minio-service-post-job-249vw.spec.containers{minio-make-user} Created Created container: minio-make-user kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:41 +0000 UTC Normal Pod minio-service-post-job-249vw.spec.containers{minio-make-user} Started Started container minio-make-user kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:43 +0000 UTC Normal PersistentVolumeClaim minio-service ProvisioningSucceeded Successfully provisioned volume pvc-5cdeb4cc-9076-46b7-9485-ad6665c6295c pd.csi.storage.gke.io_gke-b37f1d5431e84790a8b2-42bf-6fdc-vm_cc49398e-26a4-48d2-b20c-893734f24d25
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:44 +0000 UTC Normal Pod minio-service-86dfccd949-b46pp Binding Scheduled Successfully assigned kuttl-test-clear-eel/minio-service-86dfccd949-b46pp to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:51 +0000 UTC Normal Pod minio-service-86dfccd949-b46pp SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-5cdeb4cc-9076-46b7-9485-ad6665c6295c" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:52 +0000 UTC Normal Pod minio-service-86dfccd949-b46pp.spec.containers{minio} Pulled Container image "quay.io/minio/minio:RELEASE.2024-12-18T13-15-44Z" already present on machine kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:52 +0000 UTC Normal Pod minio-service-86dfccd949-b46pp.spec.containers{minio} Created Created container: minio kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:05:52 +0000 UTC Normal Pod minio-service-86dfccd949-b46pp.spec.containers{minio} Started Started container minio kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:04 +0000 UTC Normal Job.batch minio-service-post-job Completed Job completed job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:06 +0000 UTC Normal Pod aws-cli Binding Scheduled Successfully assigned kuttl-test-clear-eel/aws-cli to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:06 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulling Pulling image "perconalab/awscli" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:06 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulled Successfully pulled image "perconalab/awscli" in 119ms (119ms including waiting). Image size: 30314917 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:06 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Created Created container: aws-cli kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:06 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Started Started container aws-cli kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:12 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:12 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:12 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-clear-eel/datadir-demand-backup-mysql-0" pd.csi.storage.gke.io_gke-b37f1d5431e84790a8b2-42bf-6fdc-vm_cc49398e-26a4-48d2-b20c-893734f24d25
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:12 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Claim datadir-demand-backup-mysql-0 Pod demand-backup-mysql-0 in StatefulSet demand-backup-mysql success statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:12 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Pod demand-backup-mysql-0 in StatefulSet demand-backup-mysql successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:13 +0000 UTC Normal Pod demand-backup-orc-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:13 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulCreate create Pod demand-backup-orc-0 in StatefulSet demand-backup-orc successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:14 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:14 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 144ms (144ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:14 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container: orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:14 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:16 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 ProvisioningSucceeded Successfully provisioned volume pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55 pd.csi.storage.gke.io_gke-b37f1d5431e84790a8b2-42bf-6fdc-vm_cc49398e-26a4-48d2-b20c-893734f24d25
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:16 +0000 UTC Normal Pod demand-backup-mysql-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:16 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:16 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 106ms (106ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:16 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container: orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:16 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:16 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:16 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 111ms (111ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:16 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:16 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:24 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:25 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:25 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 136ms (136ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:25 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:25 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:27 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:27 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 114ms (114ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:27 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:27 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:27 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:27 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 113ms (113ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:27 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:27 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 129ms (129ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 101ms (101ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:48 +0000 UTC Normal Pod demand-backup-orc-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:48 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulCreate create Pod demand-backup-orc-1 in StatefulSet demand-backup-orc successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:49 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:49 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 153ms (153ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:49 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container: orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:49 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:51 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:51 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 119ms (119ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:51 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container: orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:51 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:51 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:52 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 123ms (123ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:52 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:06:52 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:00 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:00 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 Provisioning External provisioner is provisioning volume for claim "kuttl-test-clear-eel/datadir-demand-backup-mysql-1" pd.csi.storage.gke.io_gke-b37f1d5431e84790a8b2-42bf-6fdc-vm_cc49398e-26a4-48d2-b20c-893734f24d25
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:00 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:00 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Claim datadir-demand-backup-mysql-1 Pod demand-backup-mysql-1 in StatefulSet demand-backup-mysql success statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:00 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Pod demand-backup-mysql-1 in StatefulSet demand-backup-mysql successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:02 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 126ms (127ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:04 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 ProvisioningSucceeded Successfully provisioned volume pvc-f145e563-6e2e-463c-9f80-73112102a77e pd.csi.storage.gke.io_gke-b37f1d5431e84790a8b2-42bf-6fdc-vm_cc49398e-26a4-48d2-b20c-893734f24d25
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:04 +0000 UTC Normal Pod demand-backup-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:04 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:04 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-0 in StatefulSet demand-backup-haproxy successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:04 +0000 UTC Normal Pod demand-backup-mysql-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:05 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 179ms (179ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:05 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:05 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:07 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:07 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 157ms (157ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:07 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:07 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:07 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:07 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 148ms (148ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:07 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:08 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:08 +0000 UTC Normal Pod demand-backup-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:08 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-1 in StatefulSet demand-backup-haproxy successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:09 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:09 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 176ms (176ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:09 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:09 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 110ms (110ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 120ms (120ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:12 +0000 UTC Normal Pod demand-backup-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:12 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-2 in StatefulSet demand-backup-haproxy successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:12 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-f145e563-6e2e-463c-9f80-73112102a77e" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:13 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:13 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 127ms (127ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:13 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:13 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:14 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 203ms (203ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 251ms (251ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 107ms (107ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:16 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:16 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 236ms (236ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:16 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:16 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:16 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:17 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 151ms (151ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:17 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:17 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:17 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:17 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 169ms (169ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:17 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:17 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:24 +0000 UTC Normal Pod demand-backup-orc-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:24 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:24 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulCreate create Pod demand-backup-orc-2 in StatefulSet demand-backup-orc successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:25 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 145ms (145ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:25 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container: orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:25 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:26 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:26 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 114ms (114ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:26 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container: orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:26 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:27 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:27 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 118ms (118ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:27 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:27 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:34 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/27 12:07:33 Waiting for MySQL ready state 2025/05/27 12:07:33 MySQL is ready 2025/05/27 12:07:33 Peers: [3863643361613435.demand-backup-mysql-unready.kuttl-test-clear-eel 6361663330343264.demand-backup-mysql-unready.kuttl-test-clear-eel] 2025/05/27 12:07:33 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:07:34 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel] 2025/05/27 12:07:34 lookup demand-backup-mysql-1 [10.118.25.20] 2025/05/27 12:07:34 PodIP: 10.118.25.20 2025/05/27 12:07:34 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel [10.118.26.9] 2025/05/27 12:07:34 PrimaryIP: 10.118.26.9 2025/05/27 12:07:34 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:07:34 Opening connection to 10.118.25.20 2025/05/27 12:07:34 Clone required: true 2025/05/27 12:07:34 Checking if a clone in progress 2025/05/27 12:07:34 Clone in progress: false 2025/05/27 12:07:34 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:07:34 Clone finished. Restarting container... kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:37 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 121ms (121ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:07:38 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 118ms (118ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:09 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:09 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:09 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 Provisioning External provisioner is provisioning volume for claim "kuttl-test-clear-eel/datadir-demand-backup-mysql-2" pd.csi.storage.gke.io_gke-b37f1d5431e84790a8b2-42bf-6fdc-vm_cc49398e-26a4-48d2-b20c-893734f24d25
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:09 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Claim datadir-demand-backup-mysql-2 Pod demand-backup-mysql-2 in StatefulSet demand-backup-mysql success statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:09 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Pod demand-backup-mysql-2 in StatefulSet demand-backup-mysql successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:12 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 ProvisioningSucceeded Successfully provisioned volume pvc-363b05f0-ede2-4214-97d5-a0b54dd4c2f3 pd.csi.storage.gke.io_gke-b37f1d5431e84790a8b2-42bf-6fdc-vm_cc49398e-26a4-48d2-b20c-893734f24d25
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:13 +0000 UTC Normal Pod demand-backup-mysql-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:20 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-363b05f0-ede2-4214-97d5-a0b54dd4c2f3" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 141ms (141ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:24 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:24 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 134ms (134ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:24 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:24 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:24 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:24 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 140ms (140ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:24 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:24 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:25 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:25 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 249ms (249ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:25 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:25 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:43 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/27 12:08:42 Waiting for MySQL ready state 2025/05/27 12:08:42 MySQL is ready 2025/05/27 12:08:42 Peers: [3831653763616437.demand-backup-mysql-unready.kuttl-test-clear-eel 3863643361613435.demand-backup-mysql-unready.kuttl-test-clear-eel 6361663330343264.demand-backup-mysql-unready.kuttl-test-clear-eel] 2025/05/27 12:08:42 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:08:42 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel demand-backup-mysql-2.demand-backup-mysql.kuttl-test-clear-eel] 2025/05/27 12:08:42 lookup demand-backup-mysql-2 [10.118.24.18] 2025/05/27 12:08:42 PodIP: 10.118.24.18 2025/05/27 12:08:42 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel [10.118.26.9] 2025/05/27 12:08:42 PrimaryIP: 10.118.26.9 2025/05/27 12:08:42 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:08:42 Opening connection to 10.118.24.18 2025/05/27 12:08:42 Clone required: true 2025/05/27 12:08:42 Checking if a clone in progress 2025/05/27 12:08:42 Clone in progress: false 2025/05/27 12:08:42 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:08:43 Clone finished. Restarting container... kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:43 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:08:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 116ms (116ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:30 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/05/27 12:09:30 MySQL state is not ready... kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:35 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/05/27 12:09:35 MySQL state is not ready... kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:40 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:48 +0000 UTC Normal Pod demand-backup-mysql-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:49 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:50 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 153ms (153ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:50 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:50 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 110ms (110ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 129ms (129ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 121ms (121ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:09:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:10:10 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/27 12:10:09 Waiting for MySQL ready state 2025/05/27 12:10:09 MySQL is ready 2025/05/27 12:10:09 Peers: [3831653763616437.demand-backup-mysql-unready.kuttl-test-clear-eel 3864623730653833.demand-backup-mysql-unready.kuttl-test-clear-eel 6361663330343264.demand-backup-mysql-unready.kuttl-test-clear-eel] 2025/05/27 12:10:09 FQDN: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:10:09 Primary: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-clear-eel Replicas: [demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel] 2025/05/27 12:10:09 lookup demand-backup-mysql-0 [10.118.26.12] 2025/05/27 12:10:09 PodIP: 10.118.26.12 2025/05/27 12:10:09 lookup demand-backup-mysql-2.demand-backup-mysql.kuttl-test-clear-eel [10.118.24.18] 2025/05/27 12:10:09 PrimaryIP: 10.118.24.18 2025/05/27 12:10:09 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:10:09 Opening connection to 10.118.26.12 2025/05/27 12:10:09 Clone required: true 2025/05/27 12:10:09 Checking if a clone in progress 2025/05/27 12:10:09 Clone in progress: false 2025/05/27 12:10:09 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:10:10 Clone finished. Restarting container... kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:10:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:10:14 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 106ms (106ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:01 +0000 UTC Normal Pod xb-demand-backup-minio-minio-d7xk7 Binding Scheduled Successfully assigned kuttl-test-clear-eel/xb-demand-backup-minio-minio-d7xk7 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:01 +0000 UTC Normal Pod xb-demand-backup-minio-minio-d7xk7.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:01 +0000 UTC Normal Pod xb-demand-backup-minio-minio-d7xk7.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 160ms (160ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:01 +0000 UTC Normal Pod xb-demand-backup-minio-minio-d7xk7.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:01 +0000 UTC Normal Pod xb-demand-backup-minio-minio-d7xk7.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:01 +0000 UTC Normal Job.batch xb-demand-backup-minio-minio SuccessfulCreate Created pod: xb-demand-backup-minio-minio-d7xk7 job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:04 +0000 UTC Normal Pod xb-demand-backup-minio-minio-d7xk7.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:04 +0000 UTC Normal Pod xb-demand-backup-minio-minio-d7xk7.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 139ms (139ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:04 +0000 UTC Normal Pod xb-demand-backup-minio-minio-d7xk7.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:04 +0000 UTC Normal Pod xb-demand-backup-minio-minio-d7xk7.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:10 +0000 UTC Normal Job.batch xb-demand-backup-minio-minio Completed Job completed job-controller
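The xb-demand-backup-minio-minio Job above is what the operator creates in response to an on-demand backup request. A sketch of the kind of custom resource that triggers it (the resource name is illustrative, and the apiVersion and field names should be checked against the operator version under test):

    kubectl apply -n kuttl-test-clear-eel -f - <<'EOF'
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLBackup
    metadata:
      name: demand-backup-minio        # illustrative name
    spec:
      clusterName: demand-backup       # the cluster exercised by this test
      storageName: minio               # a storage defined in the cluster spec
    EOF

The Job's pod runs xtrabackup against the cluster and streams the result to the configured storage, which is why it pulls the main-backup image.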
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:46 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:46 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:46 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulDelete delete Pod demand-backup-haproxy-2 in StatefulSet demand-backup-haproxy successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:46 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulDelete delete Pod demand-backup-mysql-2 in StatefulSet demand-backup-mysql successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:46 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:46 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:46 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulDelete delete Pod demand-backup-orc-2 in StatefulSet demand-backup-orc successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:47 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:47 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:47 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulDelete delete Pod demand-backup-haproxy-1 in StatefulSet demand-backup-haproxy successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:48 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:48 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:11:48 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulDelete delete Pod demand-backup-haproxy-0 in StatefulSet demand-backup-haproxy successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:12:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:12:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:12:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:12:06 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulDelete delete Pod demand-backup-mysql-1 in StatefulSet demand-backup-mysql successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:12:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:12:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:12:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:12:10 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulDelete delete Pod demand-backup-mysql-0 in StatefulSet demand-backup-mysql successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:12:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:12:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:12:17 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulDelete delete Pod demand-backup-orc-1 in StatefulSet demand-backup-orc successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:12:47 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:12:47 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:12:47 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulDelete delete Pod demand-backup-orc-0 in StatefulSet demand-backup-orc successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:22 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-clrjt Binding Scheduled Successfully assigned kuttl-test-clear-eel/xb-restore-demand-backup-restore-minio-clrjt to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:22 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio SuccessfulCreate Created pod: xb-restore-demand-backup-restore-minio-clrjt job-controller
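Before the restore Job starts, the operator scales haproxy, mysql, and orc down to zero (the SuccessfulDelete events above) so xtrabackup can safely overwrite the data volume. The restore itself is requested with a restore resource along these lines (a sketch; the name is illustrative and the fields should be checked against the operator's CRD):

    kubectl apply -n kuttl-test-clear-eel -f - <<'EOF'
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLRestore
    metadata:
      name: demand-backup-restore-minio   # illustrative name
    spec:
      clusterName: demand-backup
      backupName: demand-backup-minio     # an existing backup resource
    EOF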
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:29 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-clrjt SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:31 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-clrjt.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:32 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-clrjt.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 177ms (177ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:32 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-clrjt.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:32 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-clrjt.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:33 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-clrjt.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:33 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-clrjt.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 135ms (135ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:33 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-clrjt.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:33 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-clrjt.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:42 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio Completed Job completed job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:43 +0000 UTC Normal Pod demand-backup-mysql-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:43 +0000 UTC Warning Pod demand-backup-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller
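The Multi-Attach warning above is transient and expected with a ReadWriteOnce volume: the restore Job ran on node ...-80j8 and still holds the attachment for pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55 when demand-backup-mysql-0 is scheduled onto ...-w1qx, and the attach succeeds once the attach/detach controller releases the volume (the SuccessfulAttachVolume event about 20 seconds later, below). If it did not clear, the attachment state could be inspected with standard kubectl:

    # Which node currently holds the attachment for the PVC's underlying PV
    kubectl get volumeattachments -o wide | grep pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55
    # All attach failures in the test namespace
    kubectl -n kuttl-test-clear-eel get events --field-selector reason=FailedAttachVolume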
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:43 +0000 UTC Normal Pod demand-backup-orc-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:43 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:44 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 159ms (159ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:44 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container: orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:44 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 137ms (137ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container: orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:46 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 117ms (117ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:46 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:46 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:13:56 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 113ms (113ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:06 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:08 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 151ms (151ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:08 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:08 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:09 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 102ms (102ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 124ms (124ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 102ms (102ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:18 +0000 UTC Normal Pod demand-backup-orc-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:19 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:19 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 131ms (131ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:19 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container: orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:19 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:21 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:22 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 103ms (103ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:22 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container: orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:22 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:22 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:22 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 121ms (121ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:22 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:22 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:33 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 128ms (128ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:43 +0000 UTC Normal Pod demand-backup-mysql-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:47 +0000 UTC Normal Pod demand-backup-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:47 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:48 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 183ms (183ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:48 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:48 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:49 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:50 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 127ms (127ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:50 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:50 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:50 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:50 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 157ms (157ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:50 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:50 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:50 +0000 UTC Normal Pod demand-backup-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:50 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-f145e563-6e2e-463c-9f80-73112102a77e" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:51 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:51 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 135ms (135ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:51 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:51 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:53 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:53 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 112ms (112ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:53 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:53 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:53 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:53 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 117ms (117ms including waiting). Image size: 102736064 bytes. kubelet
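After a restore the operator brings the components back in a fixed order, which matches the scheduling order of the events above: orchestrator first, then the mysql StatefulSet, then haproxy. Recovery progress can be followed either from pod readiness or from the cluster resource's status; a sketch (assuming the CRD exposes a ps short name and a .status.state field, which should be verified against the operator version under test):

    kubectl -n kuttl-test-clear-eel get ps demand-backup -o jsonpath='{.status.state}{"\n"}'
    kubectl -n kuttl-test-clear-eel get pods --watch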
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 159ms (159ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:54 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:54 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:54 +0000 UTC Normal Pod demand-backup-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:54 +0000 UTC Normal Pod demand-backup-orc-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:55 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:55 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 181ms (181ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:55 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:55 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:55 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:55 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:55 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 140ms (140ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:55 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container: orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:55 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 107ms (107ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 119ms (119ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 104ms (104ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 113ms (113ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container: orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 106ms (106ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 114ms (114ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:57 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:57 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:57 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:57 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 96ms (96ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:57 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:14:57 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:15:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 103ms (103ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:15:14 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/27 12:15:13 Waiting for MySQL ready state 2025/05/27 12:15:13 MySQL is ready 2025/05/27 12:15:13 Peers: [3930313438343138.demand-backup-mysql-unready.kuttl-test-clear-eel 6466313534333861.demand-backup-mysql-unready.kuttl-test-clear-eel] 2025/05/27 12:15:13 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:15:13 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel] 2025/05/27 12:15:13 lookup demand-backup-mysql-1 [10.118.25.25] 2025/05/27 12:15:13 PodIP: 10.118.25.25 2025/05/27 12:15:13 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel [10.118.26.13] 2025/05/27 12:15:13 PrimaryIP: 10.118.26.13 2025/05/27 12:15:13 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:15:13 Opening connection to 10.118.25.25 2025/05/27 12:15:13 Clone required: true 2025/05/27 12:15:13 Checking if a clone in progress 2025/05/27 12:15:13 Clone in progress: false 2025/05/27 12:15:13 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:15:14 Clone finished. Restarting container... kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:15:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:15:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 122ms (122ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:15:48 +0000 UTC Normal Pod demand-backup-mysql-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:15:56 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-363b05f0-ede2-4214-97d5-a0b54dd4c2f3" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:15:57 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:15:58 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 161ms (161ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:15:58 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:15:58 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:15:59 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:16:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 117ms (117ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:16:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:16:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:16:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:16:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 109ms (109ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:16:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:16:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:16:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:16:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 113ms (113ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:16:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:16:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:16:18 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/27 12:16:17 Waiting for MySQL ready state 2025/05/27 12:16:17 MySQL is ready 2025/05/27 12:16:17 Peers: [3331376465333937.demand-backup-mysql-unready.kuttl-test-clear-eel 3930313438343138.demand-backup-mysql-unready.kuttl-test-clear-eel 6466313534333861.demand-backup-mysql-unready.kuttl-test-clear-eel] 2025/05/27 12:16:17 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:16:17 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel demand-backup-mysql-2.demand-backup-mysql.kuttl-test-clear-eel] 2025/05/27 12:16:17 lookup demand-backup-mysql-2 [10.118.24.21] 2025/05/27 12:16:17 PodIP: 10.118.24.21 2025/05/27 12:16:17 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel [10.118.26.13] 2025/05/27 12:16:17 PrimaryIP: 10.118.26.13 2025/05/27 12:16:17 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:16:17 Opening connection to 10.118.24.21 2025/05/27 12:16:17 Clone required: true 2025/05/27 12:16:17 Checking if a clone in progress 2025/05/27 12:16:17 Clone in progress: false 2025/05/27 12:16:17 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:16:18 Clone finished. Restarting container... kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:16:18 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:16:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 213ms (213ms including waiting). Image size: 436542832 bytes. kubelet
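Each mysql replica that comes up after the restore repeats the bootstrap pattern seen earlier: one failed startup probe while the entrypoint clones from its donor ("Clone required: true ... Clone finished."), followed by a kubelet restart of the mysql container. Only repeated Unhealthy events after a successful clone would indicate a real problem. In a namespace this noisy, the relevant events can be isolated with a field selector (standard kubectl):

    kubectl -n kuttl-test-clear-eel get events \
      --field-selector involvedObject.name=demand-backup-mysql-2,reason=Unhealthy \
      --sort-by=.lastTimestamp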
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:41 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:41 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:41 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:41 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:41 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:42 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:42 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:43 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:43 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:44 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:44 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:44 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:17:52 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:18:12 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:18:12 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:18:43 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:18:43 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:17 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-glnbk Binding Scheduled Successfully assigned kuttl-test-clear-eel/xb-restore-demand-backup-restore-minio-backup-source-glnbk to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:17 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio-backup-source SuccessfulCreate Created pod: xb-restore-demand-backup-restore-minio-backup-source-glnbk job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:24 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-glnbk SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:25 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-glnbk.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:26 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-glnbk.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 278ms (278ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:26 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-glnbk.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:26 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-glnbk.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:27 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-glnbk.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:27 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-glnbk.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 207ms (207ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:27 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-glnbk.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:27 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-glnbk.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:36 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio-backup-source Completed Job completed job-controller
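This second restore (xb-restore-demand-backup-restore-minio-backup-source) exercises restoring from an explicit backup location instead of a named backup resource. A sketch of what such a request looks like (the destination, bucket, and secret names are placeholders, and the exact backupSource schema should be checked against the operator's CRD):

    kubectl apply -n kuttl-test-clear-eel -f - <<'EOF'
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLRestore
    metadata:
      name: demand-backup-restore-minio-backup-source
    spec:
      clusterName: demand-backup
      backupSource:
        destination: s3://SOME_BUCKET/SOME_BACKUP_PATH   # placeholder
        s3:
          bucket: SOME_BUCKET                            # placeholder
          credentialsSecret: SOME_S3_SECRET              # placeholder
          endpointUrl: http://minio-service:9000         # placeholder for the in-cluster MinIO
    EOF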
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container: orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 224ms (224ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:50 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 231ms (231ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:53 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:54 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:54 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 242ms (242ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:54 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:54 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:56 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 223ms (223ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 261ms (261ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 221ms (221ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:19:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:14 +0000 UTC Normal Pod demand-backup-orc-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:14 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 252ms (252ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container: orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:16 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 187ms (187ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container: orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 256ms (256ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:28 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 223ms (223ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:29 +0000 UTC Normal Pod demand-backup-mysql-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:31 +0000 UTC Normal Pod demand-backup-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:31 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:31 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 254ms (254ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:31 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:31 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:33 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:34 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 222ms (222ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:34 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:34 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:34 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:34 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 521ms (521ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:34 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:34 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:35 +0000 UTC Normal Pod demand-backup-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:35 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:35 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 253ms (253ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:35 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:35 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:37 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:37 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 234ms (234ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:37 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:37 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:37 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-f145e563-6e2e-463c-9f80-73112102a77e" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:38 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:38 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 296ms (296ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:38 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:38 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:38 +0000 UTC Normal Pod demand-backup-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:39 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:39 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 266ms (266ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:39 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:39 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:40 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 288ms (288ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:41 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 190ms (190ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:41 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:41 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:41 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:41 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 266ms (266ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:41 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:41 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:42 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 194ms (194ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 225ms (225ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 239ms (239ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:49 +0000 UTC Normal Pod demand-backup-orc-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 219ms (219ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container: orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 222ms (222ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container: orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:53 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:53 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:53 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 250ms (250ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:53 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:20:53 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:02 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/27 12:21:00 Waiting for MySQL ready state 2025/05/27 12:21:00 MySQL is ready 2025/05/27 12:21:00 Peers: [3263613839336363.demand-backup-mysql-unready.kuttl-test-clear-eel 3864333333303266.demand-backup-mysql-unready.kuttl-test-clear-eel] 2025/05/27 12:21:00 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:21:00 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel] 2025/05/27 12:21:00 lookup demand-backup-mysql-1 [10.118.25.29] 2025/05/27 12:21:00 PodIP: 10.118.25.29 2025/05/27 12:21:00 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel [10.118.26.16] 2025/05/27 12:21:00 PrimaryIP: 10.118.26.16 2025/05/27 12:21:01 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:21:01 Opening connection to 10.118.25.29 2025/05/27 12:21:01 Clone required: true 2025/05/27 12:21:01 Checking if a clone in progress 2025/05/27 12:21:01 Clone in progress: false 2025/05/27 12:21:01 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:21:02 Clone finished. Restarting container... kubelet
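The Unhealthy/Killing pair for demand-backup-mysql-1 that follows is the operator's normal join path, not an error: the startup probe deliberately reports failure while the pod clones its dataset from the donor (demand-backup-mysql-0), and after "Clone finished. Restarting container..." the kubelet restarts mysql on the cloned data. A minimal sketch for checking a clone by hand, using the MySQL CLONE plugin's status table; the $ROOT_PASSWORD variable is an assumption standing in for the root credential from the cluster secret, not something this test step sets:

    # Inspect clone progress from inside the mysql container
    kubectl -n kuttl-test-clear-eel exec demand-backup-mysql-1 -c mysql -- \
      mysql -uroot -p"$ROOT_PASSWORD" \
      -e "SELECT STATE, ERROR_NO FROM performance_schema.clone_status"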
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:02 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:03 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 218ms (218ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:05 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 253ms (254ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:35 +0000 UTC Normal Pod demand-backup-mysql-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:42 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-363b05f0-ede2-4214-97d5-a0b54dd4c2f3" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:44 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:44 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 256ms (256ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:44 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:44 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 105ms (105ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 223ms (223ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 102ms (102ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:21:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:22:04 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/27 12:22:03 Waiting for MySQL ready state 2025/05/27 12:22:03 MySQL is ready 2025/05/27 12:22:03 Peers: [3263613839336363.demand-backup-mysql-unready.kuttl-test-clear-eel 3263666364626632.demand-backup-mysql-unready.kuttl-test-clear-eel 3864333333303266.demand-backup-mysql-unready.kuttl-test-clear-eel] 2025/05/27 12:22:03 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:22:03 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel demand-backup-mysql-2.demand-backup-mysql.kuttl-test-clear-eel] 2025/05/27 12:22:03 lookup demand-backup-mysql-2 [10.118.24.24] 2025/05/27 12:22:03 PodIP: 10.118.24.24 2025/05/27 12:22:03 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel [10.118.26.16] 2025/05/27 12:22:03 PrimaryIP: 10.118.26.16 2025/05/27 12:22:03 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:22:03 Opening connection to 10.118.24.24 2025/05/27 12:22:03 Clone required: true 2025/05/27 12:22:03 Checking if a clone in progress 2025/05/27 12:22:03 Clone in progress: false 2025/05/27 12:22:03 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:22:04 Clone finished. Restarting container... kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:22:04 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:22:08 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 123ms (123ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:22:48 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-clck2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/xb-demand-backup-s3-aws-s3-clck2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:22:48 +0000 UTC Normal Job.batch xb-demand-backup-s3-aws-s3 SuccessfulCreate Created pod: xb-demand-backup-s3-aws-s3-clck2 job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:22:49 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-clck2.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:22:49 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-clck2.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 280ms (280ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:22:49 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-clck2.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:22:49 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-clck2.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:22:51 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-clck2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:22:51 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-clck2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 111ms (111ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:22:51 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-clck2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:22:51 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-clck2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:22:57 +0000 UTC Normal Job.batch xb-demand-backup-s3-aws-s3 Completed Job completed job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:09 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:09 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:09 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:10 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:10 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:10 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:10 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:12 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:12 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:17 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:17 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:17 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
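The xb-demand-backup-s3-aws-s3 Job above is not created directly by the test step; the operator spawns it in response to a backup custom resource, and once the Job completes (12:22:57) the step scales the cluster down, which produces the burst of Killing events. A minimal sketch of the kind of resource that drives such a Job, assuming the operator's ps.percona.com/v1alpha1 CRDs; the metadata name and storage name here are illustrative, not values recovered from this run:

    kubectl -n kuttl-test-clear-eel apply -f - <<EOF
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLBackup
    metadata:
      name: demand-backup-s3          # illustrative name
    spec:
      clusterName: demand-backup
      storageName: aws-s3             # must match a storage entry in the cluster spec
    EOF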
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:19 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/05/27 12:23:19 MySQL state is not ready... kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:24 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:40 +0000 UTC Warning Pod demand-backup-orc-2.spec.containers{orc} Unhealthy Readiness probe failed: Get "http://10.118.24.23:3000/api/health": dial tcp 10.118.24.23:3000: connect: connection refused kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:41 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:23:41 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:24:12 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:24:12 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:24:46 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-2qgkj Binding Scheduled Successfully assigned kuttl-test-clear-eel/xb-restore-demand-backup-restore-s3-2qgkj to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:24:46 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-s3 SuccessfulCreate Created pod: xb-restore-demand-backup-restore-s3-2qgkj job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:24:54 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-2qgkj SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:24:55 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-2qgkj.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:24:55 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-2qgkj.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 193ms (193ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:24:55 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-2qgkj.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:24:55 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-2qgkj.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:24:57 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-2qgkj.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:24:57 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-2qgkj.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 110ms (110ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:24:57 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-2qgkj.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:24:57 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-2qgkj.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:08 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-s3 Completed Job completed job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:09 +0000 UTC Normal Pod demand-backup-mysql-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:09 +0000 UTC Warning Pod demand-backup-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:09 +0000 UTC Normal Pod demand-backup-orc-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:09 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:10 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 134ms (134ms including waiting). Image size: 108784326 bytes. kubelet
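The restore follows the same pattern: a restore custom resource makes the operator stop the cluster, run the xb-restore-demand-backup-restore-s3 Job against the data volume (hence the repeated Multi-Attach warning at 12:25:09), and then bring the pods back up. A minimal sketch under the same CRD assumption as above; the metadata name and backupName are illustrative:

    kubectl -n kuttl-test-clear-eel apply -f - <<EOF
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLRestore
    metadata:
      name: demand-backup-restore-s3   # illustrative name
    spec:
      clusterName: demand-backup
      backupName: demand-backup-s3     # the completed backup to restore from
    EOF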
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:10 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container: orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:10 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:11 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:11 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 115ms (115ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:11 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container: orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:11 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:11 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:12 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 104ms (104ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:12 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:12 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:22 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 111ms (111ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:30 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 151ms (151ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:33 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:33 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 114ms (114ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:33 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:34 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:34 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:34 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 108ms (108ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:34 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:34 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:34 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:34 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 110ms (110ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:34 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:34 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:44 +0000 UTC Normal Pod demand-backup-orc-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:45 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:45 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 142ms (142ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:45 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container: orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:45 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:46 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:46 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 109ms (109ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:47 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container: orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:47 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:47 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:47 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 103ms (103ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:47 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:47 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:25:58 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 113ms (114ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:06 +0000 UTC Normal Pod demand-backup-mysql-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:07 +0000 UTC Normal Pod demand-backup-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:08 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:08 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 174ms (174ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:08 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:08 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:09 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 133ms (133ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 117ms (117ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:11 +0000 UTC Normal Pod demand-backup-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 141ms (141ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:13 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:13 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 207ms (207ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:13 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:13 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:13 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:14 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 111ms (111ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:14 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:14 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:14 +0000 UTC Normal Pod demand-backup-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:14 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-f145e563-6e2e-463c-9f80-73112102a77e" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 242ms (242ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:15 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:15 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 147ms (147ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:15 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:15 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:16 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:17 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 102ms (102ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:17 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:17 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:17 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:17 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 114ms (114ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:17 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:17 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:17 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 102ms (102ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 116ms (116ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 225ms (225ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:20 +0000 UTC Normal Pod demand-backup-orc-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:20 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:21 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 284ms (284ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:21 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container: orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:21 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:22 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:22 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 117ms (117ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:22 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container: orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:22 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:22 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:23 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 206ms (206ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:23 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:23 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:33 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 109ms (109ms including waiting). Image size: 72477565 bytes.
kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:36 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/27 12:26:35 Waiting for MySQL ready state 2025/05/27 12:26:35 MySQL is ready 2025/05/27 12:26:35 Peers: [6163653864326230.demand-backup-mysql-unready.kuttl-test-clear-eel 6661323666383536.demand-backup-mysql-unready.kuttl-test-clear-eel] 2025/05/27 12:26:35 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:26:35 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel] 2025/05/27 12:26:35 lookup demand-backup-mysql-1 [10.118.25.34] 2025/05/27 12:26:35 PodIP: 10.118.25.34 2025/05/27 12:26:35 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel [10.118.26.19] 2025/05/27 12:26:35 PrimaryIP: 10.118.26.19 2025/05/27 12:26:35 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:26:35 Opening connection to 10.118.25.34 2025/05/27 12:26:35 Clone required: true 2025/05/27 12:26:35 Checking if a clone in progress 2025/05/27 12:26:35 Clone in progress: false 2025/05/27 12:26:35 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:26:36 Clone finished. Restarting container... kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:36 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:26:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 125ms (125ms including waiting). Image size: 436542832 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:10 +0000 UTC Normal Pod demand-backup-mysql-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:18 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-363b05f0-ede2-4214-97d5-a0b54dd4c2f3" attachdetach-controller logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:19 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:19 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 128ms (128ms including waiting). Image size: 108784326 bytes. 
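
Note: the Unhealthy/Killing pair for demand-backup-mysql-1 at 12:26:36 above is the operator's expected join path, not a failure. The bootstrap routine logged by the startup probe discovers the primary, decides "Clone required: true", clones the datadir from the donor (demand-backup-mysql-0) and then deliberately exits so the kubelet restarts the container on the freshly cloned data. The same pattern repeats for mysql-2 below and again for every replica after each restore. A small sketch of how one might inspect such a bootstrap outside the harness (namespace and pod names taken from the log above; these exact commands are an illustration, not part of the test):

    # Bootstrap log of the container instance that performed the clone and exited
    kubectl -n kuttl-test-clear-eel logs demand-backup-mysql-1 -c mysql --previous
    # All probe-related events for the pod
    kubectl -n kuttl-test-clear-eel get events \
      --field-selector involvedObject.name=demand-backup-mysql-1,reason=Unhealthy
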
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:19 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:19 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:21 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:21 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 103ms (103ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:21 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 112ms (112ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 115ms (115ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:40 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/27 12:27:39 Waiting for MySQL ready state 2025/05/27 12:27:39 MySQL is ready 2025/05/27 12:27:39 Peers: [6163653864326230.demand-backup-mysql-unready.kuttl-test-clear-eel 6363636537343239.demand-backup-mysql-unready.kuttl-test-clear-eel 6661323666383536.demand-backup-mysql-unready.kuttl-test-clear-eel] 2025/05/27 12:27:39 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:27:39 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel demand-backup-mysql-2.demand-backup-mysql.kuttl-test-clear-eel] 2025/05/27 12:27:39 lookup demand-backup-mysql-2 [10.118.24.27] 2025/05/27 12:27:39 PodIP: 10.118.24.27 2025/05/27 12:27:39 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel [10.118.26.19] 2025/05/27 12:27:39 PrimaryIP: 10.118.26.19 2025/05/27 12:27:39 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:27:39 Opening connection to 10.118.24.27 2025/05/27 12:27:39 Clone required: true 2025/05/27 12:27:39 Checking if a clone in progress 2025/05/27 12:27:39 Clone in progress: false 2025/05/27 12:27:39 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:27:40 Clone finished. Restarting container... kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:40 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:27:43 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 116ms (116ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:25 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-s5dtl Binding Scheduled Successfully assigned kuttl-test-clear-eel/xb-demand-backup-gcp-gcp-cs-s5dtl to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:25 +0000 UTC Normal Job.batch xb-demand-backup-gcp-gcp-cs SuccessfulCreate Created pod: xb-demand-backup-gcp-gcp-cs-s5dtl job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:26 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-s5dtl.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:26 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-s5dtl.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 137ms (137ms including waiting). Image size: 108784326 bytes. kubelet
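
Note: the xb-demand-backup-gcp-gcp-cs Job above is not created directly by the test step; the operator spawns it in response to an on-demand backup object. A minimal sketch of such an object, with the names inferred from the Job name (a backup "demand-backup-gcp" against a storage "gcp-cs" defined in the cluster spec); the field layout follows the operator's published CRD and is an assumption here, not something taken from this log:

    kubectl -n kuttl-test-clear-eel apply -f - <<EOF
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLBackup
    metadata:
      name: demand-backup-gcp        # assumed name, inferred from the xb-demand-backup-gcp-* Job
    spec:
      clusterName: demand-backup     # the PerconaServerMySQL cluster under test
      storageName: gcp-cs            # must match a storage entry in the cluster CR
    EOF
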
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:26 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-s5dtl.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:26 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-s5dtl.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:28 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-s5dtl.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:28 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-s5dtl.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 133ms (133ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:28 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-s5dtl.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:28 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-s5dtl.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:34 +0000 UTC Normal Job.batch xb-demand-backup-gcp-gcp-cs Completed Job completed job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:46 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:46 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:46 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:46 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:47 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:47 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:48 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:48 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:50 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:50 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:50 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:54 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:54 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:54 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:28:56 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/05/27 12:28:56 MySQL state is not ready... kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:29:01 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:29:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:29:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:29:47 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:29:48 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:23 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-md5ks Binding Scheduled Successfully assigned kuttl-test-clear-eel/xb-restore-demand-backup-restore-gcp-md5ks to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:23 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-gcp SuccessfulCreate Created pod: xb-restore-demand-backup-restore-gcp-md5ks job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:38 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-md5ks SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:39 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-md5ks.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:40 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-md5ks.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 162ms (162ms including waiting). Image size: 108784326 bytes. kubelet
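
Note: the Killing cascade from 12:28:46 to 12:29:48 above is the operator stopping HAProxy, MySQL and Orchestrator before restoring; the xb-restore-demand-backup-restore-gcp Job needs exclusive access to the data volume (pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55) while it unpacks the backup. A restore of this kind is requested with an object along these lines (a sketch; the field layout follows the operator's published CRD, and the names are inferred from the Job name rather than taken from this log):

    kubectl -n kuttl-test-clear-eel apply -f - <<EOF
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLRestore
    metadata:
      name: demand-backup-restore-gcp   # assumed, inferred from the xb-restore-* Job name
    spec:
      clusterName: demand-backup
      backupName: demand-backup-gcp     # the backup that completed at 12:28:34 above
    EOF
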
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:40 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-md5ks.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:40 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-md5ks.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:41 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-md5ks.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:41 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-md5ks.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 115ms (115ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:41 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-md5ks.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:42 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-md5ks.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:53 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-gcp Completed Job completed job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:54 +0000 UTC Normal Pod demand-backup-mysql-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:54 +0000 UTC Warning Pod demand-backup-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:54 +0000 UTC Normal Pod demand-backup-orc-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:54 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:54 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 154ms (155ms including waiting). Image size: 108784326 bytes. kubelet
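
Note: the FailedAttachVolume / Multi-Attach warning for demand-backup-mysql-0 at 12:30:54 is transient. The restore Job pod on node gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 still holds pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55 when mysql-0 is scheduled to node gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx, and the attach-detach controller retries until the detach completes; the SuccessfulAttachVolume event at 12:31:18 below confirms the recovery. One way to watch such a sequence live (an illustrative command, not part of the test):

    kubectl -n kuttl-test-clear-eel get events -w \
      --field-selector involvedObject.name=demand-backup-mysql-0
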
kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:54 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:55 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:56 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:56 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 129ms (129ms including waiting). Image size: 72477565 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:57 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:57 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:57 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:57 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 142ms (142ms including waiting). Image size: 72477565 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:57 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:30:57 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:07 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 141ms (141ms including waiting). Image size: 72477565 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:18 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" attachdetach-controller logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:19 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:19 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 134ms (134ms including waiting). Image size: 108784326 bytes. 
kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:19 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:19 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:21 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:21 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 96ms (96ms including waiting). Image size: 436542832 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:21 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:21 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:21 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:22 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 102ms (102ms including waiting). Image size: 445611496 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:22 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:22 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:22 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:22 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 127ms (127ms including waiting). Image size: 132951989 bytes. 
kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:22 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:22 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:29 +0000 UTC Normal Pod demand-backup-orc-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:30 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:30 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 158ms (158ms including waiting). Image size: 108784326 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:30 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:30 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:32 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:32 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 124ms (124ms including waiting). Image size: 72477565 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:32 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:32 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:33 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:33 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 107ms (107ms including waiting). Image size: 72477565 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:33 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:33 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:43 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 103ms (103ms including waiting). Image size: 72477565 bytes. 
kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:54 +0000 UTC Normal Pod demand-backup-mysql-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:58 +0000 UTC Normal Pod demand-backup-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:59 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:59 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 147ms (147ms including waiting). Image size: 108784326 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:59 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:31:59 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:00 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:01 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 115ms (115ms including waiting). Image size: 102736064 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:01 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:01 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:01 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:01 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 129ms (129ms including waiting). Image size: 102736064 bytes. 
kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:01 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:01 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:02 +0000 UTC Normal Pod demand-backup-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 140ms (140ms including waiting). Image size: 108784326 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:02 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-f145e563-6e2e-463c-9f80-73112102a77e" attachdetach-controller logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:04 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:04 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 128ms (128ms including waiting). Image size: 102736064 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:04 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:04 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:04 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:05 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 140ms (140ms including waiting). Image size: 102736064 bytes. 
kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:05 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:05 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:05 +0000 UTC Normal Pod demand-backup-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:05 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:05 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 148ms (148ms including waiting). Image size: 108784326 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:05 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:05 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:05 +0000 UTC Normal Pod demand-backup-orc-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:05 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:05 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 154ms (154ms including waiting). Image size: 108784326 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:05 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:05 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:06 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:06 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 140ms (140ms including waiting). Image size: 108784326 bytes. 
kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:06 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:06 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:07 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:07 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 117ms (117ms including waiting). Image size: 102736064 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 101ms (101ms including waiting). Image size: 72477565 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 118ms (119ms including waiting). Image size: 72477565 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 118ms (118ms including waiting). Image size: 102736064 bytes. 
kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 153ms (153ms including waiting). Image size: 436542832 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 130ms (130ms including waiting). Image size: 445611496 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 114ms (114ms including waiting). Image size: 132951989 bytes. 
kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:08 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:18 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 114ms (114ms including waiting). Image size: 72477565 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:26 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/27 12:32:25 Waiting for MySQL ready state 2025/05/27 12:32:25 MySQL is ready 2025/05/27 12:32:25 Peers: [3562363230373936.demand-backup-mysql-unready.kuttl-test-clear-eel 3763663262643462.demand-backup-mysql-unready.kuttl-test-clear-eel] 2025/05/27 12:32:25 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:32:25 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel] 2025/05/27 12:32:25 lookup demand-backup-mysql-1 [10.118.25.39] 2025/05/27 12:32:25 PodIP: 10.118.25.39 2025/05/27 12:32:25 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel [10.118.26.22] 2025/05/27 12:32:25 PrimaryIP: 10.118.26.22 2025/05/27 12:32:25 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:32:25 Opening connection to 10.118.25.39 2025/05/27 12:32:25 Clone required: true 2025/05/27 12:32:25 Checking if a clone in progress 2025/05/27 12:32:25 Clone in progress: false 2025/05/27 12:32:25 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:32:26 Clone finished. Restarting container... kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:26 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:32:30 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 108ms (108ms including waiting). Image size: 436542832 bytes. 
kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:00 +0000 UTC Normal Pod demand-backup-mysql-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:08 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-363b05f0-ede2-4214-97d5-a0b54dd4c2f3" attachdetach-controller logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:09 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:09 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 143ms (143ms including waiting). Image size: 108784326 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:09 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:09 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:11 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:11 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 97ms (97ms including waiting). Image size: 436542832 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:11 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:12 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:12 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:12 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 113ms (113ms including waiting). Image size: 445611496 bytes. 
kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:12 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:12 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:12 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:12 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 119ms (119ms including waiting). Image size: 132951989 bytes. kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:12 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:12 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:30 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/27 12:33:29 Waiting for MySQL ready state 2025/05/27 12:33:29 MySQL is ready 2025/05/27 12:33:29 Peers: [3365316565643731.demand-backup-mysql-unready.kuttl-test-clear-eel 3562363230373936.demand-backup-mysql-unready.kuttl-test-clear-eel 3763663262643462.demand-backup-mysql-unready.kuttl-test-clear-eel] 2025/05/27 12:33:29 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:33:29 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel demand-backup-mysql-2.demand-backup-mysql.kuttl-test-clear-eel] 2025/05/27 12:33:29 lookup demand-backup-mysql-2 [10.118.24.30] 2025/05/27 12:33:29 PodIP: 10.118.24.30 2025/05/27 12:33:29 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel [10.118.26.22] 2025/05/27 12:33:29 PrimaryIP: 10.118.26.22 2025/05/27 12:33:29 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:33:29 Opening connection to 10.118.24.30 2025/05/27 12:33:29 Clone required: true 2025/05/27 12:33:29 Checking if a clone in progress 2025/05/27 12:33:29 Clone in progress: false 2025/05/27 12:33:29 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:33:30 Clone finished. Restarting container... kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:30 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:33:34 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 125ms (125ms including waiting). Image size: 436542832 bytes. 
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:16 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-b4jsn Binding Scheduled Successfully assigned kuttl-test-clear-eel/xb-demand-backup-azure-azure-blob-b4jsn to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:16 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-b4jsn.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:16 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-b4jsn.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 147ms (147ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:16 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-b4jsn.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:16 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-b4jsn.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:16 +0000 UTC Normal Job.batch xb-demand-backup-azure-azure-blob SuccessfulCreate Created pod: xb-demand-backup-azure-azure-blob-b4jsn job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:18 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-b4jsn.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:18 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-b4jsn.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 111ms (111ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:18 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-b4jsn.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:18 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-b4jsn.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:24 +0000 UTC Normal Job.batch xb-demand-backup-azure-azure-blob Completed Job completed job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:37 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:37 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:37 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:37 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:37 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:37 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:37 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:38 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:38 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:39 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:39 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:41 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:42 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:42 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:45 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:45 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:45 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
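The burst of Killing events above is the whole cluster being stopped ahead of the restore; the xb-restore-demand-backup-restore-azure Job that follows then writes the backup back onto the data volume before the pods are recreated. Since both the backup and the restore are driven by batch Jobs in the test namespace, a quick way to see both phases at once:

    # Backup (xb-demand-backup-*) and restore (xb-restore-*) Jobs side by side
    kubectl -n kuttl-test-clear-eel get jobs -o wide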
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:34:49 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:35:08 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:35:08 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:35:39 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:35:39 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:09 +0000 UTC Warning Pod demand-backup-orc-0.spec.containers{orc} Unhealthy Readiness probe failed: Get "http://10.118.25.37:3000/api/health": dial tcp 10.118.25.37:3000: connect: connection refused kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:14 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-p9c22 Binding Scheduled Successfully assigned kuttl-test-clear-eel/xb-restore-demand-backup-restore-azure-p9c22 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:14 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-azure SuccessfulCreate Created pod: xb-restore-demand-backup-restore-azure-p9c22 job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:22 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-p9c22 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:23 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-p9c22.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:23 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-p9c22.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 134ms (134ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:23 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-p9c22.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:23 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-p9c22.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:25 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-p9c22.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:25 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-p9c22.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 130ms (130ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:26 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-p9c22.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:26 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-p9c22.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:38 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-azure Completed Job completed job-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:39 +0000 UTC Normal Pod demand-backup-mysql-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:39 +0000 UTC Warning Pod demand-backup-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:39 +0000 UTC Normal Pod demand-backup-orc-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:39 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 150ms (150ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container: orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:41 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:42 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 106ms (106ms including waiting). Image size: 72477565 bytes. kubelet
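The FailedAttachVolume warning above is benign here: the restored data volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" behaves as a single-node (ReadWriteOnce-style) block volume, an assumption consistent with the Multi-Attach message, and it is still attached to the node that ran the restore Job when demand-backup-mysql-0 is scheduled to a different node; the SuccessfulAttachVolume event at 12:36:55 below shows the handoff completing once the old attachment is detached. A minimal sketch for watching that handoff:

    # Show which node currently holds each volume attachment
    kubectl get volumeattachments \
      -o custom-columns=NAME:.metadata.name,NODE:.spec.nodeName,PV:.spec.source.persistentVolumeName,ATTACHED:.status.attached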
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:42 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container: orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:42 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:42 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:42 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 153ms (153ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:42 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:42 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:53 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 154ms (154ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:55 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-68e85602-71c0-4fe2-8f38-ae388a2c4f55" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:56 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:56 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 164ms (164ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:58 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:58 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 111ms (111ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:58 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:58 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:58 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:58 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 105ms (105ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:59 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:59 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:59 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:59 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 111ms (111ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:59 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:36:59 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:14 +0000 UTC Normal Pod demand-backup-orc-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 155ms (155ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container: orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 115ms (115ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container: orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:17 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:18 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 110ms (110ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:18 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:18 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:28 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 113ms (113ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:31 +0000 UTC Normal Pod demand-backup-mysql-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:33 +0000 UTC Normal Pod demand-backup-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-0 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:33 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:33 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 159ms (159ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:33 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:33 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:36 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:36 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 121ms (121ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:36 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:36 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:36 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:36 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 126ms (126ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:36 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:36 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:37 +0000 UTC Normal Pod demand-backup-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-1 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:37 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:37 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 151ms (151ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:37 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:37 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:39 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:39 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 98ms (98ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:39 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:39 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:39 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:39 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 154ms (154ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:39 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-f145e563-6e2e-463c-9f80-73112102a77e" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:40 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:40 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:40 +0000 UTC Normal Pod demand-backup-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-haproxy-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-w1qx default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:41 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:41 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 134ms (134ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:41 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:41 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:42 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:42 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 120ms (120ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:42 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:42 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:43 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:43 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 117ms (117ms including waiting). Image size: 102736064 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:43 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:43 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 145ms (145ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:45 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:45 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 104ms (104ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:45 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:45 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:45 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:45 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 153ms (153ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:45 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:45 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:45 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:45 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 97ms (97ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:45 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:45 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:50 +0000 UTC Normal Pod demand-backup-orc-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-orc-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 145ms (145ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:50 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container: orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:51 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 107ms (107ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container: orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 107ms (107ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:52 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:37:53 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:03 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 110ms (110ms including waiting). Image size: 72477565 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:04 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/27 12:38:03 Waiting for MySQL ready state 2025/05/27 12:38:03 MySQL is ready 2025/05/27 12:38:03 Peers: [6333366635323731.demand-backup-mysql-unready.kuttl-test-clear-eel 6665303064626433.demand-backup-mysql-unready.kuttl-test-clear-eel] 2025/05/27 12:38:03 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:38:03 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel] 2025/05/27 12:38:03 lookup demand-backup-mysql-1 [10.118.25.44] 2025/05/27 12:38:03 PodIP: 10.118.25.44 2025/05/27 12:38:03 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel [10.118.26.25] 2025/05/27 12:38:03 PrimaryIP: 10.118.26.25 2025/05/27 12:38:03 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:38:03 Opening connection to 10.118.25.44 2025/05/27 12:38:03 Clone required: true 2025/05/27 12:38:03 Checking if a clone in progress 2025/05/27 12:38:03 Clone in progress: false 2025/05/27 12:38:03 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:38:04 Clone finished. Restarting container... kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:04 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 115ms (115ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:38 +0000 UTC Normal Pod demand-backup-mysql-2 Binding Scheduled Successfully assigned kuttl-test-clear-eel/demand-backup-mysql-2 to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-zw5j default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:46 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-363b05f0-ede2-4214-97d5-a0b54dd4c2f3" attachdetach-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:47 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:47 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-916-a5bda550" in 136ms (136ms including waiting). Image size: 108784326 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:47 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:47 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:49 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:49 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 104ms (104ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:49 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:50 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:50 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:50 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 114ms (114ms including waiting). Image size: 445611496 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:50 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:50 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:50 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:50 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 102ms (102ms including waiting). Image size: 132951989 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:50 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:38:50 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:39:08 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/27 12:39:07 Waiting for MySQL ready state 2025/05/27 12:39:07 MySQL is ready 2025/05/27 12:39:07 Peers: [3661373331353561.demand-backup-mysql-unready.kuttl-test-clear-eel 6333366635323731.demand-backup-mysql-unready.kuttl-test-clear-eel 6665303064626433.demand-backup-mysql-unready.kuttl-test-clear-eel] 2025/05/27 12:39:07 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:39:07 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel demand-backup-mysql-2.demand-backup-mysql.kuttl-test-clear-eel] 2025/05/27 12:39:07 lookup demand-backup-mysql-2 [10.118.24.33] 2025/05/27 12:39:07 PodIP: 10.118.24.33 2025/05/27 12:39:07 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-clear-eel [10.118.26.25] 2025/05/27 12:39:07 PrimaryIP: 10.118.26.25 2025/05/27 12:39:07 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:39:07 Opening connection to 10.118.24.33 2025/05/27 12:39:07 Clone required: true 2025/05/27 12:39:07 Checking if a clone in progress 2025/05/27 12:39:07 Clone in progress: false 2025/05/27 12:39:07 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-clear-eel 2025/05/27 12:39:08 Clone finished. Restarting container... kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:39:08 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:39:11 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 100ms (100ms including waiting). Image size: 436542832 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:39:57 +0000 UTC Normal Pod aws-cli Binding Scheduled Successfully assigned kuttl-test-clear-eel/aws-cli to gke-jen-ps-916-a5bda550--default-pool-01b0dbd2-80j8 default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:39:58 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulling Pulling image "perconalab/awscli" kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:39:58 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulled Successfully pulled image "perconalab/awscli" in 99ms (99ms including waiting). Image size: 30314917 bytes. kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:39:58 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Created Created container: aws-cli kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:39:58 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Started Started container aws-cli kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:02 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:02 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:02 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:02 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:02 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:02 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:02 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:02 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:02 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:02 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:02 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:02 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:03 +0000 UTC Warning Pod demand-backup-haproxy-0 Scheduling FailedScheduling running Bind plugin "DefaultBinder": Operation cannot be fulfilled on pods/binding "demand-backup-haproxy-0": pod demand-backup-haproxy-0 is being deleted, cannot be assigned to a host default-scheduler
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:03 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-0 in StatefulSet demand-backup-haproxy successful statefulset-controller
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:03 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
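The short-lived aws-cli pod above (12:39:57) is the step that verifies the uploaded backup objects in object storage; the Killing events that follow are the cluster being torn down at the end of the test. A rough manual equivalent, sketched under assumptions: the MinIO endpoint, bucket name, and credentials below are illustrative placeholders, not values taken from this log.

    # Hypothetical endpoint/bucket/credentials; substitute the real ones from the test's secrets
    kubectl -n kuttl-test-clear-eel run -it --rm aws-cli --image=perconalab/awscli --restart=Never \
      --env AWS_ACCESS_KEY_ID=some-access-key --env AWS_SECRET_ACCESS_KEY=some-secret-key -- \
      aws --endpoint-url http://minio-service:9000 s3 ls s3://operator-testing/ --recursive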
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:03 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:03 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:03 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:03 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:03 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:03 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:40:09 | demand-backup | 2025-05-27 12:40:06 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/05/27 12:40:06 MySQL state is not ready... kubelet
logger.go:42: 12:40:10 | demand-backup | Deleting namespace: kuttl-test-clear-eel
=== NAME kuttl
    harness.go:407: run tests finished
    harness.go:515: cleaning up
    harness.go:572: removing temp folder: ""
--- PASS: kuttl (2136.02s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/demand-backup (2135.57s)
PASS
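For reference, a single test from a suite like this can be re-run in isolation with kuttl's test filter. The --test and --timeout flags exist in upstream kuttl; the suite path below is an assumption about the repository layout, not something asserted by this log.

    # From the repository root (path assumed), run only the demand-backup test
    kubectl kuttl test ./e2e-tests/tests --test demand-backup --timeout 180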