=== RUN kuttl harness.go:459: starting setup harness.go:254: running tests using configured kubeconfig. harness.go:277: Successful connection to cluster at: https://34.31.151.230 harness.go:362: running tests harness.go:74: going to run test suite with timeout of 180 seconds for each step harness.go:374: testsuite: e2e-tests/tests has 39 tests === RUN kuttl/harness === RUN kuttl/harness/demand-backup === PAUSE kuttl/harness/demand-backup === CONT kuttl/harness/demand-backup logger.go:42: 06:06:00 | demand-backup | Creating namespace: kuttl-test-safe-ladybug logger.go:42: 06:06:01 | demand-backup/0-minio-secret | starting test step 0-minio-secret logger.go:42: 06:06:01 | demand-backup/0-minio-secret | Secret:kuttl-test-safe-ladybug/minio-secret created logger.go:42: 06:06:02 | demand-backup/0-minio-secret | test step completed 0-minio-secret logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | starting test step 1-deploy-operator logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | running command: [sh -c set -o errexit set -o xtrace source ../../functions init_temp_dir # do this only in the first TestStep apply_s3_storage_secrets deploy_operator deploy_tls_cluster_secrets deploy_client deploy_minio] logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | + source ../../functions logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ realpath ../../.. logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | ++++ pwd logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | ++ test_name=demand-backup logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ MYSQL_VERSION=8.0 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export MINIO_VER=5.4.0 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ MINIO_VER=5.4.0 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ export VAULT_VER=0.16.1 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ VAULT_VER=0.16.1 logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | ++++ which gdate logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | ++++ which date logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ date=/usr/sbin/date logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ oc get projects logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ : logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ kubectl get nodes logger.go:42: 06:06:02 | demand-backup/1-deploy-operator | +++ grep '^minikube' logger.go:42: 06:06:03 | demand-backup/1-deploy-operator | ++ oc get projects logger.go:42: 06:06:03 | demand-backup/1-deploy-operator | +++ kubectl version -o json logger.go:42: 06:06:03 | demand-backup/1-deploy-operator | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:06:03 | demand-backup/1-deploy-operator | +++ grep '\-eks\-' logger.go:42: 06:06:03 | demand-backup/1-deploy-operator | grep: warning: stray \ before - logger.go:42: 06:06:04 | demand-backup/1-deploy-operator | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:06:04 | demand-backup/1-deploy-operator | ++ '[' ']' logger.go:42: 06:06:04 | demand-backup/1-deploy-operator | ++ EKS=0 logger.go:42: 06:06:04 | demand-backup/1-deploy-operator | + init_temp_dir logger.go:42: 06:06:04 | demand-backup/1-deploy-operator | + rm -rf /tmp/kuttl/ps/demand-backup logger.go:42: 06:06:04 | demand-backup/1-deploy-operator | + mkdir -p /tmp/kuttl/ps/demand-backup logger.go:42: 06:06:04 | demand-backup/1-deploy-operator | + apply_s3_storage_secrets logger.go:42: 06:06:04 | demand-backup/1-deploy-operator | + apply_minio_secret logger.go:42: 06:06:04 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-safe-ladybug apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf/minio-secret.yml logger.go:42: 06:06:05 | demand-backup/1-deploy-operator | Warning: resource secrets/minio-secret is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 06:06:05 | demand-backup/1-deploy-operator | secret/minio-secret configured logger.go:42: 06:06:05 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-safe-ladybug apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf/cloud-secret.yml logger.go:42: 06:06:06 | demand-backup/1-deploy-operator | secret/aws-s3-secret created logger.go:42: 06:06:07 | demand-backup/1-deploy-operator | secret/gcp-cs-secret created logger.go:42: 06:06:07 | demand-backup/1-deploy-operator | secret/azure-secret created logger.go:42: 06:06:07 | demand-backup/1-deploy-operator | + deploy_operator logger.go:42: 06:06:07 | demand-backup/1-deploy-operator | + destroy_operator logger.go:42: 06:06:07 | demand-backup/1-deploy-operator | + kubectl -n ps-operator delete deployment percona-server-mysql-operator --force --grace-period=0 logger.go:42: 06:06:07 | demand-backup/1-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 06:06:08 | demand-backup/1-deploy-operator | Error from server (NotFound): deployments.apps "percona-server-mysql-operator" not found logger.go:42: 06:06:08 | demand-backup/1-deploy-operator | + true logger.go:42: 06:06:08 | demand-backup/1-deploy-operator | + [[ -n ps-operator ]] logger.go:42: 06:06:08 | demand-backup/1-deploy-operator | + kubectl delete namespace ps-operator --force --grace-period=0 logger.go:42: 06:06:08 | demand-backup/1-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 06:06:08 | demand-backup/1-deploy-operator | Error from server (NotFound): namespaces "ps-operator" not found logger.go:42: 06:06:08 | demand-backup/1-deploy-operator | + true logger.go:42: 06:06:08 | demand-backup/1-deploy-operator | + [[ -n ps-operator ]] logger.go:42: 06:06:08 | demand-backup/1-deploy-operator | + create_namespace ps-operator logger.go:42: 06:06:08 | demand-backup/1-deploy-operator | + local namespace=ps-operator logger.go:42: 06:06:08 | demand-backup/1-deploy-operator | + [[ -n '' ]] logger.go:42: 06:06:08 | demand-backup/1-deploy-operator | + kubectl delete namespace ps-operator --ignore-not-found logger.go:42: 06:06:09 | demand-backup/1-deploy-operator | + kubectl wait --for=delete namespace ps-operator logger.go:42: 06:06:09 | demand-backup/1-deploy-operator | + kubectl create namespace ps-operator logger.go:42: 06:06:09 | demand-backup/1-deploy-operator | namespace/ps-operator created logger.go:42: 06:06:09 | demand-backup/1-deploy-operator | + kubectl -n ps-operator apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy/crd.yaml logger.go:42: 06:06:11 | demand-backup/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlbackups.ps.percona.com serverside-applied logger.go:42: 06:06:11 | demand-backup/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlrestores.ps.percona.com serverside-applied logger.go:42: 06:06:12 | demand-backup/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqls.ps.percona.com serverside-applied logger.go:42: 06:06:12 | demand-backup/1-deploy-operator | + '[' -n ps-operator ']' logger.go:42: 06:06:12 | demand-backup/1-deploy-operator | + kubectl -n ps-operator apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy/cw-rbac.yaml logger.go:42: 06:06:14 | demand-backup/1-deploy-operator | serviceaccount/percona-server-mysql-operator created logger.go:42: 06:06:14 | demand-backup/1-deploy-operator | role.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created logger.go:42: 06:06:15 | demand-backup/1-deploy-operator | clusterrole.rbac.authorization.k8s.io/percona-server-mysql-operator created logger.go:42: 06:06:15 | demand-backup/1-deploy-operator | rolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created logger.go:42: 06:06:16 | demand-backup/1-deploy-operator | clusterrolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator created logger.go:42: 06:06:16 | demand-backup/1-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="DISABLE_TELEMETRY").value) = "true"' logger.go:42: 06:06:16 | demand-backup/1-deploy-operator | ++ printf 'select(documentIndex==1).spec.template.spec.containers[0].image="%s"' perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:06:16 | demand-backup/1-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="LOG_LEVEL").value) = "DEBUG"' logger.go:42: 06:06:16 | demand-backup/1-deploy-operator | + kubectl -n ps-operator apply -f - logger.go:42: 06:06:16 | demand-backup/1-deploy-operator | + yq eval 'select(documentIndex==1).spec.template.spec.containers[0].image="perconalab/percona-server-mysql-operator:PR-1041-fa9862d8"' /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy/cw-operator.yaml logger.go:42: 06:06:19 | demand-backup/1-deploy-operator | configmap/percona-server-mysql-operator-config created logger.go:42: 06:06:19 | demand-backup/1-deploy-operator | deployment.apps/percona-server-mysql-operator created logger.go:42: 06:06:19 | demand-backup/1-deploy-operator | + deploy_tls_cluster_secrets logger.go:42: 06:06:19 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-safe-ladybug apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf/ssl-secret.yaml logger.go:42: 06:06:20 | demand-backup/1-deploy-operator | secret/test-ssl created logger.go:42: 06:06:20 | demand-backup/1-deploy-operator | + deploy_client logger.go:42: 06:06:20 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-safe-ladybug apply -f - logger.go:42: 06:06:20 | demand-backup/1-deploy-operator | ++ printf '.spec.containers[0].image="%s"' perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:06:20 | demand-backup/1-deploy-operator | + yq eval '.spec.containers[0].image="perconalab/percona-server-mysql-operator:main-psmysql8.0"' /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf/client.yaml logger.go:42: 06:06:22 | demand-backup/1-deploy-operator | pod/mysql-client created logger.go:42: 06:06:22 | demand-backup/1-deploy-operator | + deploy_minio logger.go:42: 06:06:22 | demand-backup/1-deploy-operator | + local access_key logger.go:42: 06:06:22 | demand-backup/1-deploy-operator | + local secret_key logger.go:42: 06:06:22 | demand-backup/1-deploy-operator | ++ kubectl -n kuttl-test-safe-ladybug get secret minio-secret -o 'jsonpath={.data.AWS_ACCESS_KEY_ID}' logger.go:42: 06:06:22 | demand-backup/1-deploy-operator | ++ base64 -d logger.go:42: 06:06:22 | demand-backup/1-deploy-operator | + access_key=some-access-key logger.go:42: 06:06:22 | demand-backup/1-deploy-operator | ++ kubectl -n kuttl-test-safe-ladybug get secret minio-secret -o 'jsonpath={.data.AWS_SECRET_ACCESS_KEY}' logger.go:42: 06:06:22 | demand-backup/1-deploy-operator | ++ base64 -d logger.go:42: 06:06:22 | demand-backup/1-deploy-operator | + secret_key=some-secret-key logger.go:42: 06:06:22 | demand-backup/1-deploy-operator | + helm uninstall -n kuttl-test-safe-ladybug minio-service logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | Error: uninstall: Release not loaded: minio-service: release: not found logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | + : logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | + helm repo remove minio logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | Error: no repositories configured logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | + : logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | + helm repo add minio https://charts.min.io/ logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | "minio" has been added to your repositories logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | +++ printf %q some-access-key logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | ++ printf %q some-access-key logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | +++ printf %q some-secret-key logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | ++ printf %q some-secret-key logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | + retry 10 60 helm install minio-service -n kuttl-test-safe-ladybug --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | + local max=10 logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | + local delay=60 logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | + shift 2 logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | + local n=1 logger.go:42: 06:06:23 | demand-backup/1-deploy-operator | + helm install minio-service -n kuttl-test-safe-ladybug --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | NAME: minio-service logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | LAST DEPLOYED: Thu Sep 11 06:06:24 2025 logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | NAMESPACE: kuttl-test-safe-ladybug logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | STATUS: deployed logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | REVISION: 1 logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | TEST SUITE: None logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | NOTES: logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | MinIO can be accessed via port 9000 on the following DNS name from within your cluster: logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | minio-service.kuttl-test-safe-ladybug.cluster.local logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | To access MinIO from localhost, run the below commands: logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | 1. export POD_NAME=$(kubectl get pods --namespace kuttl-test-safe-ladybug -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | 2. kubectl port-forward $POD_NAME 9000 --namespace kuttl-test-safe-ladybug logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace kuttl-test-safe-ladybug minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace kuttl-test-safe-ladybug minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | 3. mc ls minio-service-local logger.go:42: 06:06:52 | demand-backup/1-deploy-operator | ++ kubectl -n kuttl-test-safe-ladybug get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:06:53 | demand-backup/1-deploy-operator | + MINIO_POD=minio-service-86dfccd949-fj6xm logger.go:42: 06:06:53 | demand-backup/1-deploy-operator | + wait_pod minio-service-86dfccd949-fj6xm logger.go:42: 06:06:53 | demand-backup/1-deploy-operator | + local pod=minio-service-86dfccd949-fj6xm logger.go:42: 06:06:53 | demand-backup/1-deploy-operator | + set +o xtrace logger.go:42: 06:06:53 | demand-backup/1-deploy-operator | minio-service-86dfccd949-fj6xmtrue logger.go:42: 06:06:53 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-safe-ladybug run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID='\''some-access-key'\'' AWS_SECRET_ACCESS_KEY='\''some-secret-key'\'' AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' logger.go:42: 06:06:57 | demand-backup/1-deploy-operator | All commands and output from this session will be recorded in container logs, including credentials and sensitive information passed through the command prompt. logger.go:42: 06:06:57 | demand-backup/1-deploy-operator | If you don't see a command prompt, try pressing enter. logger.go:42: 06:07:04 | demand-backup/1-deploy-operator | pod "aws-cli" deleted from kuttl-test-safe-ladybug namespace logger.go:42: 06:07:05 | demand-backup/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 06:07:05 | demand-backup/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 06:07:05 | demand-backup/1-deploy-operator | INFO Found 1 resource(s). logger.go:42: 06:07:05 | demand-backup/1-deploy-operator | NAME NAMESPACE COL0 logger.go:42: 06:07:05 | demand-backup/1-deploy-operator | percona-server-mysql-operator ps-operator 1 logger.go:42: 06:07:05 | demand-backup/1-deploy-operator | ASSERT PASS logger.go:42: 06:07:05 | demand-backup/1-deploy-operator | test step completed 1-deploy-operator logger.go:42: 06:07:05 | demand-backup/2-create-cluster | starting test step 2-create-cluster logger.go:42: 06:07:05 | demand-backup/2-create-cluster | running command: [sh -c set -o errexit set -o xtrace source ../../functions get_cr \ | yq eval '.spec.mysql.clusterType="async"' - \ | yq eval ".spec.mysql.size=3" - \ | yq eval ".spec.proxy.haproxy.enabled=true" - \ | yq eval ".spec.proxy.haproxy.size=3" - \ | yq eval ".spec.orchestrator.enabled=true" - \ | yq eval ".spec.orchestrator.size=3" - \ | yq eval '.spec.backup.storages.minio.type="s3"' - \ | yq eval '.spec.backup.storages.minio.s3.bucket="operator-testing"' - \ | yq eval '.spec.backup.storages.minio.s3.credentialsSecret="minio-secret"' - \ | yq eval ".spec.backup.storages.minio.s3.endpointUrl=\"http://minio-service.${NAMESPACE}:9000\"" - \ | yq eval '.spec.backup.storages.minio.s3.region="us-east-1"' - \ | yq eval '.spec.backup.storages.minio.containerOptions.env[0].name="VERIFY_TLS"' - \ | yq eval '.spec.backup.storages.minio.containerOptions.env[0].value="false"' - \ | yq eval '.spec.backup.storages.minio.containerOptions.env[1].name="CUSTOM_ENV"' - \ | yq eval '.spec.backup.storages.minio.containerOptions.env[1].value="test"' - \ | yq eval '.spec.backup.storages.minio.containerOptions.args.xtrabackup[0]="--notsupported"' - \ | yq eval '.spec.backup.storages.minio.containerOptions.args.xbcloud[0]="--insecure"' - \ | yq eval '.spec.backup.storages.aws-s3.type="s3"' - \ | yq eval ".spec.backup.storages.aws-s3.verifyTLS=true" - \ | yq eval '.spec.backup.storages.aws-s3.s3.bucket="operator-testing"' - \ | yq eval '.spec.backup.storages.aws-s3.s3.credentialsSecret="aws-s3-secret"' - \ | yq eval '.spec.backup.storages.aws-s3.s3.region="us-east-1"' - \ | yq eval '.spec.backup.storages.aws-s3.s3.prefix="ps"' - \ | yq eval '.spec.backup.storages.gcp-cs.type="gcs"' - \ | yq eval ".spec.backup.storages.gcp-cs.verifyTLS=true" - \ | yq eval '.spec.backup.storages.gcp-cs.gcs.bucket="operator-testing"' - \ | yq eval '.spec.backup.storages.gcp-cs.gcs.credentialsSecret="gcp-cs-secret"' - \ | yq eval '.spec.backup.storages.gcp-cs.gcs.endpointUrl="https://storage.googleapis.com"' - \ | yq eval '.spec.backup.storages.gcp-cs.gcs.prefix="ps"' - \ | yq eval '.spec.backup.storages.azure-blob.type="azure"' - \ | yq eval ".spec.backup.storages.azure-blob.verifyTLS=true" - \ | yq eval '.spec.backup.storages.azure-blob.azure.container="operator-testing"' - \ | yq eval '.spec.backup.storages.azure-blob.azure.credentialsSecret="azure-secret"' - \ | yq eval '.spec.backup.storages.azure-blob.azure.prefix="ps"' - \ | kubectl -n "${NAMESPACE}" apply -f -] logger.go:42: 06:07:05 | demand-backup/2-create-cluster | + source ../../functions logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ realpath ../../.. logger.go:42: 06:07:05 | demand-backup/2-create-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | ++++ pwd logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:07:05 | demand-backup/2-create-cluster | ++ test_name=demand-backup logger.go:42: 06:07:05 | demand-backup/2-create-cluster | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:07:05 | demand-backup/2-create-cluster | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ MYSQL_VERSION=8.0 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export MINIO_VER=5.4.0 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ MINIO_VER=5.4.0 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ export VAULT_VER=0.16.1 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ VAULT_VER=0.16.1 logger.go:42: 06:07:05 | demand-backup/2-create-cluster | ++++ which gdate logger.go:42: 06:07:05 | demand-backup/2-create-cluster | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:07:05 | demand-backup/2-create-cluster | ++++ which date logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ date=/usr/sbin/date logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ oc get projects logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ : logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ kubectl get nodes logger.go:42: 06:07:05 | demand-backup/2-create-cluster | +++ grep '^minikube' logger.go:42: 06:07:06 | demand-backup/2-create-cluster | ++ oc get projects logger.go:42: 06:07:06 | demand-backup/2-create-cluster | +++ kubectl version -o json logger.go:42: 06:07:06 | demand-backup/2-create-cluster | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:07:06 | demand-backup/2-create-cluster | +++ grep '\-eks\-' logger.go:42: 06:07:06 | demand-backup/2-create-cluster | grep: warning: stray \ before - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:07:06 | demand-backup/2-create-cluster | ++ '[' ']' logger.go:42: 06:07:06 | demand-backup/2-create-cluster | ++ EKS=0 logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + get_cr logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + local name_suffix= logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.mysql.clusterType="async"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval .spec.mysql.size=3 - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval .spec.proxy.haproxy.enabled=true - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval .spec.proxy.haproxy.size=3 - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval .spec.orchestrator.enabled=true - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval .spec.orchestrator.size=3 - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | ++ printf '.metadata.name="%s"' demand-backup logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.sslSecretName="test-ssl"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.metadata.name="demand-backup"' /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy/cr.yaml logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval .spec.mysql.gracePeriod=30 - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval .spec.orchestrator.enabled=true - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.mysql.clusterType="async"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.secretsName="test-secrets"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | ++ printf '.spec.initContainer.image="%s"' perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.upgradeOptions.apply="disabled"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.type="s3"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.bucket="operator-testing"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.endpointUrl="http://minio-service.kuttl-test-safe-ladybug:9000"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.region="us-east-1"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.credentialsSecret="minio-secret"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.initContainer.image="perconalab/percona-server-mysql-operator:PR-1041-fa9862d8"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql8.0"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.containerOptions.env[0].name="VERIFY_TLS"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.containerOptions.env[1].name="CUSTOM_ENV"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.containerOptions.env[0].value="false"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.bucket="operator-testing"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval .spec.backup.storages.gcp-cs.verifyTLS=true - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.bucket="operator-testing"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.containerOptions.args.xtrabackup[0]="--notsupported"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.type="s3"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.containerOptions.env[1].value="test"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.region="us-east-1"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.credentialsSecret="gcp-cs-secret"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.containerOptions.args.xbcloud[0]="--insecure"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.type="gcs"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval .spec.backup.storages.aws-s3.verifyTLS=true - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.credentialsSecret="aws-s3-secret"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.prefix="ps"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:07:06 | demand-backup/2-create-cluster | ++ printf '.spec.proxy.haproxy.image="%s"' perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:07:06 | demand-backup/2-create-cluster | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:07:06 | demand-backup/2-create-cluster | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:07:06 | demand-backup/2-create-cluster | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:07:06 | demand-backup/2-create-cluster | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:3-dev-latest logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router8.0"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + '[' -n '' ']' logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.prefix="ps"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.container="operator-testing"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.endpointUrl="https://storage.googleapis.com"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.prefix="ps"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.type="azure"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup8.0"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval .spec.backup.storages.azure-blob.verifyTLS=true - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + kubectl -n kuttl-test-safe-ladybug apply -f - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.credentialsSecret="azure-secret"' - logger.go:42: 06:07:06 | demand-backup/2-create-cluster | + yq eval '.spec.pmm.image="perconalab/pmm-client:3-dev-latest"' - logger.go:42: 06:07:08 | demand-backup/2-create-cluster | perconaservermysql.ps.percona.com/demand-backup created logger.go:42: 06:11:30 | demand-backup/2-create-cluster | test step completed 2-create-cluster logger.go:42: 06:11:30 | demand-backup/3-write-data | starting test step 3-write-data logger.go:42: 06:11:30 | demand-backup/3-write-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions password=$(get_user_pass root) run_mysql \ "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -p'$password'" run_mysql \ "INSERT myDB.myTable (id) VALUES (100500)" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -p'$password'"] logger.go:42: 06:11:30 | demand-backup/3-write-data | + source ../../functions logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ realpath ../../.. logger.go:42: 06:11:30 | demand-backup/3-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:11:30 | demand-backup/3-write-data | ++++ pwd logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:11:30 | demand-backup/3-write-data | ++ test_name=demand-backup logger.go:42: 06:11:30 | demand-backup/3-write-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:11:30 | demand-backup/3-write-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ MYSQL_VERSION=8.0 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export MINIO_VER=5.4.0 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ MINIO_VER=5.4.0 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ export VAULT_VER=0.16.1 logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ VAULT_VER=0.16.1 logger.go:42: 06:11:30 | demand-backup/3-write-data | ++++ which gdate logger.go:42: 06:11:30 | demand-backup/3-write-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:11:30 | demand-backup/3-write-data | ++++ which date logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ date=/usr/sbin/date logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ oc get projects logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ : logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ kubectl get nodes logger.go:42: 06:11:30 | demand-backup/3-write-data | +++ grep '^minikube' logger.go:42: 06:11:31 | demand-backup/3-write-data | ++ oc get projects logger.go:42: 06:11:31 | demand-backup/3-write-data | +++ kubectl version -o json logger.go:42: 06:11:31 | demand-backup/3-write-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:11:31 | demand-backup/3-write-data | +++ grep '\-eks\-' logger.go:42: 06:11:31 | demand-backup/3-write-data | grep: warning: stray \ before - logger.go:42: 06:11:31 | demand-backup/3-write-data | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:11:31 | demand-backup/3-write-data | ++ '[' ']' logger.go:42: 06:11:31 | demand-backup/3-write-data | ++ EKS=0 logger.go:42: 06:11:31 | demand-backup/3-write-data | ++ get_user_pass root logger.go:42: 06:11:31 | demand-backup/3-write-data | ++ local user=root logger.go:42: 06:11:31 | demand-backup/3-write-data | ++ kubectl -n kuttl-test-safe-ladybug get secret test-secrets -o 'jsonpath={.data.root}' logger.go:42: 06:11:31 | demand-backup/3-write-data | ++ base64 --decode logger.go:42: 06:11:31 | demand-backup/3-write-data | + password='w(nT7<_+scfBV.#>Ox5' logger.go:42: 06:11:31 | demand-backup/3-write-data | +++ get_cluster_name logger.go:42: 06:11:31 | demand-backup/3-write-data | +++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:11:32 | demand-backup/3-write-data | ++ get_haproxy_svc demand-backup logger.go:42: 06:11:32 | demand-backup/3-write-data | ++ local cluster=demand-backup logger.go:42: 06:11:32 | demand-backup/3-write-data | ++ echo demand-backup-haproxy logger.go:42: 06:11:32 | demand-backup/3-write-data | + run_mysql 'CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)' '-h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:11:32 | demand-backup/3-write-data | + local 'command=CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)' logger.go:42: 06:11:32 | demand-backup/3-write-data | + local 'uri=-h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:11:32 | demand-backup/3-write-data | + local pod= logger.go:42: 06:11:32 | demand-backup/3-write-data | ++ get_client_pod logger.go:42: 06:11:32 | demand-backup/3-write-data | ++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:11:32 | demand-backup/3-write-data | + client_pod=mysql-client logger.go:42: 06:11:32 | demand-backup/3-write-data | + wait_pod mysql-client logger.go:42: 06:11:32 | demand-backup/3-write-data | + local pod=mysql-client logger.go:42: 06:11:32 | demand-backup/3-write-data | + set +o xtrace logger.go:42: 06:11:33 | demand-backup/3-write-data | mysql-clienttrue logger.go:42: 06:11:33 | demand-backup/3-write-data | + kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" | mysql -sN -h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:11:33 | demand-backup/3-write-data | + sed -e 's/mysql: //' logger.go:42: 06:11:33 | demand-backup/3-write-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:11:34 | demand-backup/3-write-data | + : logger.go:42: 06:11:34 | demand-backup/3-write-data | +++ get_cluster_name logger.go:42: 06:11:34 | demand-backup/3-write-data | +++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:11:34 | demand-backup/3-write-data | ++ get_haproxy_svc demand-backup logger.go:42: 06:11:34 | demand-backup/3-write-data | ++ local cluster=demand-backup logger.go:42: 06:11:34 | demand-backup/3-write-data | ++ echo demand-backup-haproxy logger.go:42: 06:11:34 | demand-backup/3-write-data | + run_mysql 'INSERT myDB.myTable (id) VALUES (100500)' '-h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:11:34 | demand-backup/3-write-data | + local 'command=INSERT myDB.myTable (id) VALUES (100500)' logger.go:42: 06:11:34 | demand-backup/3-write-data | + local 'uri=-h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:11:34 | demand-backup/3-write-data | + local pod= logger.go:42: 06:11:34 | demand-backup/3-write-data | ++ get_client_pod logger.go:42: 06:11:34 | demand-backup/3-write-data | ++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:11:35 | demand-backup/3-write-data | + client_pod=mysql-client logger.go:42: 06:11:35 | demand-backup/3-write-data | + wait_pod mysql-client logger.go:42: 06:11:35 | demand-backup/3-write-data | + local pod=mysql-client logger.go:42: 06:11:35 | demand-backup/3-write-data | + set +o xtrace logger.go:42: 06:11:35 | demand-backup/3-write-data | mysql-clienttrue logger.go:42: 06:11:35 | demand-backup/3-write-data | + sed -e 's/mysql: //' logger.go:42: 06:11:35 | demand-backup/3-write-data | + kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "INSERT myDB.myTable (id) VALUES (100500)" | mysql -sN -h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:11:35 | demand-backup/3-write-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:11:36 | demand-backup/3-write-data | + : logger.go:42: 06:11:36 | demand-backup/3-write-data | test step completed 3-write-data logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | starting test step 4-move-primary-before-backup logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | running command: [sh -c set -o errexit set -o xtrace source ../../functions primary_pod_from_label="$(get_primary_from_label)" kubectl delete pod -n ${NAMESPACE} ${primary_pod_from_label} wait_cluster_consistency_async "${test_name}" "3" "3" new_primary_pod_from_label="$(get_primary_from_label)" if [ "${primary_pod_from_label}" == "${new_primary_pod_from_label}" ]; then echo "Old (${primary_pod_from_label}) and new (${new_primary_pod_from_label}) primary are the same (the failover didn't happen)!" exit 1 fi] logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | + source ../../functions logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ realpath ../../.. logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | ++++ pwd logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | ++ test_name=demand-backup logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ MYSQL_VERSION=8.0 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export MINIO_VER=5.4.0 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ MINIO_VER=5.4.0 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ export VAULT_VER=0.16.1 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ VAULT_VER=0.16.1 logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | ++++ which gdate logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | ++++ which date logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ date=/usr/sbin/date logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ oc get projects logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ : logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ kubectl get nodes logger.go:42: 06:11:36 | demand-backup/4-move-primary-before-backup | +++ grep '^minikube' logger.go:42: 06:11:37 | demand-backup/4-move-primary-before-backup | ++ oc get projects logger.go:42: 06:11:37 | demand-backup/4-move-primary-before-backup | +++ kubectl version -o json logger.go:42: 06:11:37 | demand-backup/4-move-primary-before-backup | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:11:37 | demand-backup/4-move-primary-before-backup | +++ grep '\-eks\-' logger.go:42: 06:11:37 | demand-backup/4-move-primary-before-backup | grep: warning: stray \ before - logger.go:42: 06:11:37 | demand-backup/4-move-primary-before-backup | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:11:37 | demand-backup/4-move-primary-before-backup | ++ '[' ']' logger.go:42: 06:11:37 | demand-backup/4-move-primary-before-backup | ++ EKS=0 logger.go:42: 06:11:37 | demand-backup/4-move-primary-before-backup | ++ get_primary_from_label logger.go:42: 06:11:37 | demand-backup/4-move-primary-before-backup | ++ kubectl -n kuttl-test-safe-ladybug get pods -l mysql.percona.com/primary=true '-ojsonpath={.items[0].metadata.name}' logger.go:42: 06:11:38 | demand-backup/4-move-primary-before-backup | + primary_pod_from_label=demand-backup-mysql-0 logger.go:42: 06:11:38 | demand-backup/4-move-primary-before-backup | + kubectl delete pod -n kuttl-test-safe-ladybug demand-backup-mysql-0 logger.go:42: 06:11:38 | demand-backup/4-move-primary-before-backup | pod "demand-backup-mysql-0" deleted from kuttl-test-safe-ladybug namespace logger.go:42: 06:11:58 | demand-backup/4-move-primary-before-backup | + wait_cluster_consistency_async demand-backup 3 3 logger.go:42: 06:11:58 | demand-backup/4-move-primary-before-backup | + local cluster_name=demand-backup logger.go:42: 06:11:58 | demand-backup/4-move-primary-before-backup | + local cluster_size=3 logger.go:42: 06:11:58 | demand-backup/4-move-primary-before-backup | + local orc_size=3 logger.go:42: 06:11:58 | demand-backup/4-move-primary-before-backup | + '[' -z 3 ']' logger.go:42: 06:11:58 | demand-backup/4-move-primary-before-backup | + sleep 7 logger.go:42: 06:12:05 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-safe-ladybug -o 'jsonpath={.status.mysql.state}' logger.go:42: 06:12:06 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]] logger.go:42: 06:12:06 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)' logger.go:42: 06:12:06 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async) logger.go:42: 06:12:06 | demand-backup/4-move-primary-before-backup | + sleep 15 logger.go:42: 06:12:21 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-safe-ladybug -o 'jsonpath={.status.mysql.state}' logger.go:42: 06:12:21 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]] logger.go:42: 06:12:21 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)' logger.go:42: 06:12:21 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async) logger.go:42: 06:12:21 | demand-backup/4-move-primary-before-backup | + sleep 15 logger.go:42: 06:12:36 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-safe-ladybug -o 'jsonpath={.status.mysql.state}' logger.go:42: 06:12:37 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]] logger.go:42: 06:12:37 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)' logger.go:42: 06:12:37 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async) logger.go:42: 06:12:37 | demand-backup/4-move-primary-before-backup | + sleep 15 logger.go:42: 06:12:52 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-safe-ladybug -o 'jsonpath={.status.mysql.state}' logger.go:42: 06:12:52 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]] logger.go:42: 06:12:52 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)' logger.go:42: 06:12:52 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async) logger.go:42: 06:12:52 | demand-backup/4-move-primary-before-backup | + sleep 15 logger.go:42: 06:13:07 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-safe-ladybug -o 'jsonpath={.status.mysql.state}' logger.go:42: 06:13:08 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]] logger.go:42: 06:13:08 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)' logger.go:42: 06:13:08 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async) logger.go:42: 06:13:08 | demand-backup/4-move-primary-before-backup | + sleep 15 logger.go:42: 06:13:23 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-safe-ladybug -o 'jsonpath={.status.mysql.state}' logger.go:42: 06:13:23 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]] logger.go:42: 06:13:23 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)' logger.go:42: 06:13:23 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async) logger.go:42: 06:13:23 | demand-backup/4-move-primary-before-backup | + sleep 15 logger.go:42: 06:13:38 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-safe-ladybug -o 'jsonpath={.status.mysql.state}' logger.go:42: 06:13:39 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]] logger.go:42: 06:13:39 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)' logger.go:42: 06:13:39 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async) logger.go:42: 06:13:39 | demand-backup/4-move-primary-before-backup | + sleep 15 logger.go:42: 06:13:54 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-safe-ladybug -o 'jsonpath={.status.mysql.state}' logger.go:42: 06:13:55 | demand-backup/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]] logger.go:42: 06:13:55 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-safe-ladybug -o 'jsonpath={.status.mysql.ready}' logger.go:42: 06:13:55 | demand-backup/4-move-primary-before-backup | + [[ 3 == \3 ]] logger.go:42: 06:13:55 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-safe-ladybug -o 'jsonpath={.status.orchestrator.ready}' logger.go:42: 06:13:56 | demand-backup/4-move-primary-before-backup | + [[ 3 == \3 ]] logger.go:42: 06:13:56 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-safe-ladybug -o 'jsonpath={.status.orchestrator.state}' logger.go:42: 06:13:56 | demand-backup/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]] logger.go:42: 06:13:56 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-safe-ladybug -o 'jsonpath={.status.state}' logger.go:42: 06:13:57 | demand-backup/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]] logger.go:42: 06:13:57 | demand-backup/4-move-primary-before-backup | ++ get_primary_from_label logger.go:42: 06:13:57 | demand-backup/4-move-primary-before-backup | ++ kubectl -n kuttl-test-safe-ladybug get pods -l mysql.percona.com/primary=true '-ojsonpath={.items[0].metadata.name}' logger.go:42: 06:13:57 | demand-backup/4-move-primary-before-backup | + new_primary_pod_from_label=demand-backup-mysql-1 logger.go:42: 06:13:57 | demand-backup/4-move-primary-before-backup | + '[' demand-backup-mysql-0 == demand-backup-mysql-1 ']' logger.go:42: 06:13:57 | demand-backup/4-move-primary-before-backup | test step completed 4-move-primary-before-backup logger.go:42: 06:13:57 | demand-backup/5-create-backup-minio | starting test step 5-create-backup-minio logger.go:42: 06:13:58 | demand-backup/5-create-backup-minio | PerconaServerMySQLBackup:kuttl-test-safe-ladybug/demand-backup-minio created logger.go:42: 06:14:10 | demand-backup/5-create-backup-minio | test step completed 5-create-backup-minio logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | starting test step 6-check-cmd-flags logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | running command: [sh -c set -o errexit set -o xtrace source ../../functions restore_pod_name=$(kubectl get pod -n ${NAMESPACE} -l batch.kubernetes.io/job-name=xb-demand-backup-minio-minio -o name) xtrabackup_flag_count=$(kubectl logs -n ${NAMESPACE} $restore_pod_name | grep -- "--strict" | wc -l) if [ "$xtrabackup_flag_count" -eq 0 ]; then echo "custom flag --strict was provided to the restore but it's not mentioned in the logs" exit 1 fi] logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | + source ../../functions logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ realpath ../../.. logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | ++++ pwd logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | ++ test_name=demand-backup logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ MYSQL_VERSION=8.0 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export MINIO_VER=5.4.0 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ MINIO_VER=5.4.0 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ export VAULT_VER=0.16.1 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ VAULT_VER=0.16.1 logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | ++++ which gdate logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | ++++ which date logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ date=/usr/sbin/date logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ oc get projects logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ : logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ kubectl get nodes logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ grep '^minikube' logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | ++ oc get projects logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ kubectl version -o json logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | +++ grep '\-eks\-' logger.go:42: 06:14:10 | demand-backup/6-check-cmd-flags | grep: warning: stray \ before - logger.go:42: 06:14:11 | demand-backup/6-check-cmd-flags | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:14:11 | demand-backup/6-check-cmd-flags | ++ '[' ']' logger.go:42: 06:14:11 | demand-backup/6-check-cmd-flags | ++ EKS=0 logger.go:42: 06:14:11 | demand-backup/6-check-cmd-flags | ++ kubectl get pod -n kuttl-test-safe-ladybug -l batch.kubernetes.io/job-name=xb-demand-backup-minio-minio -o name logger.go:42: 06:14:11 | demand-backup/6-check-cmd-flags | + restore_pod_name=pod/xb-demand-backup-minio-minio-sksjv logger.go:42: 06:14:11 | demand-backup/6-check-cmd-flags | ++ kubectl logs -n kuttl-test-safe-ladybug pod/xb-demand-backup-minio-minio-sksjv logger.go:42: 06:14:11 | demand-backup/6-check-cmd-flags | ++ grep -- --strict logger.go:42: 06:14:11 | demand-backup/6-check-cmd-flags | ++ wc -l logger.go:42: 06:14:12 | demand-backup/6-check-cmd-flags | Defaulted container "xtrabackup" out of: xtrabackup, xtrabackup-init (init) logger.go:42: 06:14:12 | demand-backup/6-check-cmd-flags | + xtrabackup_flag_count=1 logger.go:42: 06:14:12 | demand-backup/6-check-cmd-flags | + '[' 1 -eq 0 ']' logger.go:42: 06:14:12 | demand-backup/6-check-cmd-flags | test step completed 6-check-cmd-flags logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | starting test step 7-check-password-leak logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | running command: [sh -c set -o errexit set -o xtrace source ../../functions # Temporarily skipping this check #check_passwords_leak] logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | + source ../../functions logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ realpath ../../.. logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | ++++ pwd logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | ++ test_name=demand-backup logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ MYSQL_VERSION=8.0 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export MINIO_VER=5.4.0 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ MINIO_VER=5.4.0 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ export VAULT_VER=0.16.1 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ VAULT_VER=0.16.1 logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | ++++ which gdate logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | ++++ which date logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ date=/usr/sbin/date logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ oc get projects logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ : logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ grep '^minikube' logger.go:42: 06:14:12 | demand-backup/7-check-password-leak | +++ kubectl get nodes logger.go:42: 06:14:13 | demand-backup/7-check-password-leak | ++ oc get projects logger.go:42: 06:14:13 | demand-backup/7-check-password-leak | +++ kubectl version -o json logger.go:42: 06:14:13 | demand-backup/7-check-password-leak | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:14:13 | demand-backup/7-check-password-leak | +++ grep '\-eks\-' logger.go:42: 06:14:13 | demand-backup/7-check-password-leak | grep: warning: stray \ before - logger.go:42: 06:14:13 | demand-backup/7-check-password-leak | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:14:13 | demand-backup/7-check-password-leak | ++ '[' ']' logger.go:42: 06:14:13 | demand-backup/7-check-password-leak | ++ EKS=0 logger.go:42: 06:14:13 | demand-backup/7-check-password-leak | test step completed 7-check-password-leak logger.go:42: 06:14:13 | demand-backup/8-delete-data | starting test step 8-delete-data logger.go:42: 06:14:13 | demand-backup/8-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions password=$(get_user_pass root) run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -p'$password'" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -p'$password'") kubectl create configmap -n "${NAMESPACE}" 04-delete-data-minio-${i} --from-literal=data="${data}" done] logger.go:42: 06:14:13 | demand-backup/8-delete-data | + source ../../functions logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ realpath ../../.. logger.go:42: 06:14:13 | demand-backup/8-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:14:13 | demand-backup/8-delete-data | ++++ pwd logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:14:13 | demand-backup/8-delete-data | ++ test_name=demand-backup logger.go:42: 06:14:13 | demand-backup/8-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:14:13 | demand-backup/8-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ MYSQL_VERSION=8.0 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export MINIO_VER=5.4.0 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ MINIO_VER=5.4.0 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ export VAULT_VER=0.16.1 logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ VAULT_VER=0.16.1 logger.go:42: 06:14:13 | demand-backup/8-delete-data | ++++ which gdate logger.go:42: 06:14:13 | demand-backup/8-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:14:13 | demand-backup/8-delete-data | ++++ which date logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ date=/usr/sbin/date logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ oc get projects logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ : logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ kubectl get nodes logger.go:42: 06:14:13 | demand-backup/8-delete-data | +++ grep '^minikube' logger.go:42: 06:14:14 | demand-backup/8-delete-data | ++ oc get projects logger.go:42: 06:14:14 | demand-backup/8-delete-data | +++ kubectl version -o json logger.go:42: 06:14:14 | demand-backup/8-delete-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:14:14 | demand-backup/8-delete-data | +++ grep '\-eks\-' logger.go:42: 06:14:14 | demand-backup/8-delete-data | grep: warning: stray \ before - logger.go:42: 06:14:14 | demand-backup/8-delete-data | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:14:14 | demand-backup/8-delete-data | ++ '[' ']' logger.go:42: 06:14:14 | demand-backup/8-delete-data | ++ EKS=0 logger.go:42: 06:14:14 | demand-backup/8-delete-data | ++ get_user_pass root logger.go:42: 06:14:14 | demand-backup/8-delete-data | ++ local user=root logger.go:42: 06:14:14 | demand-backup/8-delete-data | ++ kubectl -n kuttl-test-safe-ladybug get secret test-secrets -o 'jsonpath={.data.root}' logger.go:42: 06:14:14 | demand-backup/8-delete-data | ++ base64 --decode logger.go:42: 06:14:15 | demand-backup/8-delete-data | + password='w(nT7<_+scfBV.#>Ox5' logger.go:42: 06:14:15 | demand-backup/8-delete-data | +++ get_cluster_name logger.go:42: 06:14:15 | demand-backup/8-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:14:15 | demand-backup/8-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 06:14:15 | demand-backup/8-delete-data | ++ local cluster=demand-backup logger.go:42: 06:14:15 | demand-backup/8-delete-data | ++ echo demand-backup-haproxy logger.go:42: 06:14:15 | demand-backup/8-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:14:15 | demand-backup/8-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 06:14:15 | demand-backup/8-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:14:15 | demand-backup/8-delete-data | + local pod= logger.go:42: 06:14:15 | demand-backup/8-delete-data | ++ get_client_pod logger.go:42: 06:14:15 | demand-backup/8-delete-data | ++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:14:16 | demand-backup/8-delete-data | + client_pod=mysql-client logger.go:42: 06:14:16 | demand-backup/8-delete-data | + wait_pod mysql-client logger.go:42: 06:14:16 | demand-backup/8-delete-data | + local pod=mysql-client logger.go:42: 06:14:16 | demand-backup/8-delete-data | + set +o xtrace logger.go:42: 06:14:16 | demand-backup/8-delete-data | mysql-clienttrue logger.go:42: 06:14:16 | demand-backup/8-delete-data | + kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:14:16 | demand-backup/8-delete-data | + sed -e 's/mysql: //' logger.go:42: 06:14:16 | demand-backup/8-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:14:17 | demand-backup/8-delete-data | + : logger.go:42: 06:14:17 | demand-backup/8-delete-data | ++ get_cluster_name logger.go:42: 06:14:17 | demand-backup/8-delete-data | ++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:14:18 | demand-backup/8-delete-data | + cluster_name=demand-backup logger.go:42: 06:14:18 | demand-backup/8-delete-data | + for i in 0 1 2 logger.go:42: 06:14:18 | demand-backup/8-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:14:18 | demand-backup/8-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:14:18 | demand-backup/8-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:14:18 | demand-backup/8-delete-data | ++ local pod= logger.go:42: 06:14:18 | demand-backup/8-delete-data | +++ get_client_pod logger.go:42: 06:14:18 | demand-backup/8-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:14:18 | demand-backup/8-delete-data | ++ client_pod=mysql-client logger.go:42: 06:14:18 | demand-backup/8-delete-data | ++ wait_pod mysql-client logger.go:42: 06:14:18 | demand-backup/8-delete-data | ++ local pod=mysql-client logger.go:42: 06:14:18 | demand-backup/8-delete-data | ++ set +o xtrace logger.go:42: 06:14:19 | demand-backup/8-delete-data | mysql-clienttrue logger.go:42: 06:14:19 | demand-backup/8-delete-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:14:19 | demand-backup/8-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 06:14:19 | demand-backup/8-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:14:19 | demand-backup/8-delete-data | ++ : logger.go:42: 06:14:19 | demand-backup/8-delete-data | + data= logger.go:42: 06:14:19 | demand-backup/8-delete-data | + kubectl create configmap -n kuttl-test-safe-ladybug 04-delete-data-minio-0 --from-literal=data= logger.go:42: 06:14:20 | demand-backup/8-delete-data | configmap/04-delete-data-minio-0 created logger.go:42: 06:14:20 | demand-backup/8-delete-data | + for i in 0 1 2 logger.go:42: 06:14:20 | demand-backup/8-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:14:20 | demand-backup/8-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:14:20 | demand-backup/8-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:14:20 | demand-backup/8-delete-data | ++ local pod= logger.go:42: 06:14:20 | demand-backup/8-delete-data | +++ get_client_pod logger.go:42: 06:14:20 | demand-backup/8-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:14:20 | demand-backup/8-delete-data | ++ client_pod=mysql-client logger.go:42: 06:14:20 | demand-backup/8-delete-data | ++ wait_pod mysql-client logger.go:42: 06:14:20 | demand-backup/8-delete-data | ++ local pod=mysql-client logger.go:42: 06:14:20 | demand-backup/8-delete-data | ++ set +o xtrace logger.go:42: 06:14:21 | demand-backup/8-delete-data | mysql-clienttrue logger.go:42: 06:14:21 | demand-backup/8-delete-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:14:21 | demand-backup/8-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 06:14:21 | demand-backup/8-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:14:22 | demand-backup/8-delete-data | ++ : logger.go:42: 06:14:22 | demand-backup/8-delete-data | + data= logger.go:42: 06:14:22 | demand-backup/8-delete-data | + kubectl create configmap -n kuttl-test-safe-ladybug 04-delete-data-minio-1 --from-literal=data= logger.go:42: 06:14:22 | demand-backup/8-delete-data | configmap/04-delete-data-minio-1 created logger.go:42: 06:14:22 | demand-backup/8-delete-data | + for i in 0 1 2 logger.go:42: 06:14:22 | demand-backup/8-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:14:22 | demand-backup/8-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:14:22 | demand-backup/8-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:14:22 | demand-backup/8-delete-data | ++ local pod= logger.go:42: 06:14:22 | demand-backup/8-delete-data | +++ get_client_pod logger.go:42: 06:14:22 | demand-backup/8-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:14:23 | demand-backup/8-delete-data | ++ client_pod=mysql-client logger.go:42: 06:14:23 | demand-backup/8-delete-data | ++ wait_pod mysql-client logger.go:42: 06:14:23 | demand-backup/8-delete-data | ++ local pod=mysql-client logger.go:42: 06:14:23 | demand-backup/8-delete-data | ++ set +o xtrace logger.go:42: 06:14:23 | demand-backup/8-delete-data | mysql-clienttrue logger.go:42: 06:14:23 | demand-backup/8-delete-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:14:23 | demand-backup/8-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 06:14:23 | demand-backup/8-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:14:24 | demand-backup/8-delete-data | ++ : logger.go:42: 06:14:24 | demand-backup/8-delete-data | + data= logger.go:42: 06:14:24 | demand-backup/8-delete-data | + kubectl create configmap -n kuttl-test-safe-ladybug 04-delete-data-minio-2 --from-literal=data= logger.go:42: 06:14:25 | demand-backup/8-delete-data | configmap/04-delete-data-minio-2 created logger.go:42: 06:14:26 | demand-backup/8-delete-data | test step completed 8-delete-data logger.go:42: 06:14:26 | demand-backup/9-restore-from-minio | starting test step 9-restore-from-minio logger.go:42: 06:14:27 | demand-backup/9-restore-from-minio | PerconaServerMySQLRestore:kuttl-test-safe-ladybug/demand-backup-restore-minio created logger.go:42: 06:18:36 | demand-backup/9-restore-from-minio | test step completed 9-restore-from-minio logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | starting test step 10-check-cmd-flags logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | running command: [sh -c set -o errexit set -o xtrace source ../../functions restore_pod_name=$(kubectl get pod -n ${NAMESPACE} -l batch.kubernetes.io/job-name=xb-restore-demand-backup-restore-minio -o name) xbstream_flag_count=$(kubectl logs -n ${NAMESPACE} $restore_pod_name | grep -- "--verbose" | wc -l) if [ "$xbstream_flag_count" -eq 0 ]; then echo "custom flag --verbose was provided to the restore but it's not mentioned in the logs" exit 1 fi xtrabackup_flag_count=$(kubectl logs -n ${NAMESPACE} $restore_pod_name | grep -- "--strict" | wc -l) if [ "$xtrabackup_flag_count" -eq 0 ]; then echo "custom flag --strict was provided to the restore but it's not mentioned in the logs" exit 1 fi] logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | + source ../../functions logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ realpath ../../.. logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | ++++ pwd logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | ++ test_name=demand-backup logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ MYSQL_VERSION=8.0 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export MINIO_VER=5.4.0 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ MINIO_VER=5.4.0 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ export VAULT_VER=0.16.1 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ VAULT_VER=0.16.1 logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | ++++ which gdate logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | ++++ which date logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ date=/usr/sbin/date logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ oc get projects logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ : logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ kubectl get nodes logger.go:42: 06:18:36 | demand-backup/10-check-cmd-flags | +++ grep '^minikube' logger.go:42: 06:18:37 | demand-backup/10-check-cmd-flags | ++ oc get projects logger.go:42: 06:18:37 | demand-backup/10-check-cmd-flags | +++ kubectl version -o json logger.go:42: 06:18:37 | demand-backup/10-check-cmd-flags | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:18:37 | demand-backup/10-check-cmd-flags | +++ grep '\-eks\-' logger.go:42: 06:18:37 | demand-backup/10-check-cmd-flags | grep: warning: stray \ before - logger.go:42: 06:18:37 | demand-backup/10-check-cmd-flags | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:18:37 | demand-backup/10-check-cmd-flags | ++ '[' ']' logger.go:42: 06:18:37 | demand-backup/10-check-cmd-flags | ++ EKS=0 logger.go:42: 06:18:37 | demand-backup/10-check-cmd-flags | ++ kubectl get pod -n kuttl-test-safe-ladybug -l batch.kubernetes.io/job-name=xb-restore-demand-backup-restore-minio -o name logger.go:42: 06:18:38 | demand-backup/10-check-cmd-flags | + restore_pod_name=pod/xb-restore-demand-backup-restore-minio-qtldc logger.go:42: 06:18:38 | demand-backup/10-check-cmd-flags | ++ grep -- --verbose logger.go:42: 06:18:38 | demand-backup/10-check-cmd-flags | ++ kubectl logs -n kuttl-test-safe-ladybug pod/xb-restore-demand-backup-restore-minio-qtldc logger.go:42: 06:18:38 | demand-backup/10-check-cmd-flags | ++ wc -l logger.go:42: 06:18:38 | demand-backup/10-check-cmd-flags | Defaulted container "xtrabackup" out of: xtrabackup, xtrabackup-init (init) logger.go:42: 06:18:39 | demand-backup/10-check-cmd-flags | + xbstream_flag_count=1 logger.go:42: 06:18:39 | demand-backup/10-check-cmd-flags | + '[' 1 -eq 0 ']' logger.go:42: 06:18:39 | demand-backup/10-check-cmd-flags | ++ kubectl logs -n kuttl-test-safe-ladybug pod/xb-restore-demand-backup-restore-minio-qtldc logger.go:42: 06:18:39 | demand-backup/10-check-cmd-flags | ++ grep -- --strict logger.go:42: 06:18:39 | demand-backup/10-check-cmd-flags | ++ wc -l logger.go:42: 06:18:39 | demand-backup/10-check-cmd-flags | Defaulted container "xtrabackup" out of: xtrabackup, xtrabackup-init (init) logger.go:42: 06:18:39 | demand-backup/10-check-cmd-flags | + xtrabackup_flag_count=4 logger.go:42: 06:18:39 | demand-backup/10-check-cmd-flags | + '[' 4 -eq 0 ']' logger.go:42: 06:18:39 | demand-backup/10-check-cmd-flags | test step completed 10-check-cmd-flags logger.go:42: 06:18:39 | demand-backup/11-read-data | starting test step 11-read-data logger.go:42: 06:18:39 | demand-backup/11-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions password=$(get_user_pass root) cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -p'$password'") kubectl create configmap -n "${NAMESPACE}" 06-read-data-minio-${i} --from-literal=data="${data}" done] logger.go:42: 06:18:39 | demand-backup/11-read-data | + source ../../functions logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ realpath ../../.. logger.go:42: 06:18:39 | demand-backup/11-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:18:39 | demand-backup/11-read-data | ++++ pwd logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:18:39 | demand-backup/11-read-data | ++ test_name=demand-backup logger.go:42: 06:18:39 | demand-backup/11-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:18:39 | demand-backup/11-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ MYSQL_VERSION=8.0 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export MINIO_VER=5.4.0 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ MINIO_VER=5.4.0 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ export VAULT_VER=0.16.1 logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ VAULT_VER=0.16.1 logger.go:42: 06:18:39 | demand-backup/11-read-data | ++++ which gdate logger.go:42: 06:18:39 | demand-backup/11-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:18:39 | demand-backup/11-read-data | ++++ which date logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ date=/usr/sbin/date logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ oc get projects logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ : logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ kubectl get nodes logger.go:42: 06:18:39 | demand-backup/11-read-data | +++ grep '^minikube' logger.go:42: 06:18:40 | demand-backup/11-read-data | ++ oc get projects logger.go:42: 06:18:40 | demand-backup/11-read-data | +++ kubectl version -o json logger.go:42: 06:18:40 | demand-backup/11-read-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:18:40 | demand-backup/11-read-data | +++ grep '\-eks\-' logger.go:42: 06:18:40 | demand-backup/11-read-data | grep: warning: stray \ before - logger.go:42: 06:18:40 | demand-backup/11-read-data | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:18:40 | demand-backup/11-read-data | ++ '[' ']' logger.go:42: 06:18:40 | demand-backup/11-read-data | ++ EKS=0 logger.go:42: 06:18:40 | demand-backup/11-read-data | ++ get_user_pass root logger.go:42: 06:18:40 | demand-backup/11-read-data | ++ local user=root logger.go:42: 06:18:40 | demand-backup/11-read-data | ++ kubectl -n kuttl-test-safe-ladybug get secret test-secrets -o 'jsonpath={.data.root}' logger.go:42: 06:18:40 | demand-backup/11-read-data | ++ base64 --decode logger.go:42: 06:18:41 | demand-backup/11-read-data | + password='w(nT7<_+scfBV.#>Ox5' logger.go:42: 06:18:41 | demand-backup/11-read-data | ++ get_cluster_name logger.go:42: 06:18:41 | demand-backup/11-read-data | ++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:18:41 | demand-backup/11-read-data | + cluster_name=demand-backup logger.go:42: 06:18:41 | demand-backup/11-read-data | + for i in 0 1 2 logger.go:42: 06:18:41 | demand-backup/11-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:41 | demand-backup/11-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:18:41 | demand-backup/11-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:41 | demand-backup/11-read-data | ++ local pod= logger.go:42: 06:18:41 | demand-backup/11-read-data | +++ get_client_pod logger.go:42: 06:18:41 | demand-backup/11-read-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:18:42 | demand-backup/11-read-data | ++ client_pod=mysql-client logger.go:42: 06:18:42 | demand-backup/11-read-data | ++ wait_pod mysql-client logger.go:42: 06:18:42 | demand-backup/11-read-data | ++ local pod=mysql-client logger.go:42: 06:18:42 | demand-backup/11-read-data | ++ set +o xtrace logger.go:42: 06:18:42 | demand-backup/11-read-data | mysql-clienttrue logger.go:42: 06:18:42 | demand-backup/11-read-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:42 | demand-backup/11-read-data | ++ sed -e 's/mysql: //' logger.go:42: 06:18:42 | demand-backup/11-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:18:43 | demand-backup/11-read-data | + data=100500 logger.go:42: 06:18:43 | demand-backup/11-read-data | + kubectl create configmap -n kuttl-test-safe-ladybug 06-read-data-minio-0 --from-literal=data=100500 logger.go:42: 06:18:44 | demand-backup/11-read-data | configmap/06-read-data-minio-0 created logger.go:42: 06:18:44 | demand-backup/11-read-data | + for i in 0 1 2 logger.go:42: 06:18:44 | demand-backup/11-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:44 | demand-backup/11-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:18:44 | demand-backup/11-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:44 | demand-backup/11-read-data | ++ local pod= logger.go:42: 06:18:44 | demand-backup/11-read-data | +++ get_client_pod logger.go:42: 06:18:44 | demand-backup/11-read-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:18:44 | demand-backup/11-read-data | ++ client_pod=mysql-client logger.go:42: 06:18:44 | demand-backup/11-read-data | ++ wait_pod mysql-client logger.go:42: 06:18:44 | demand-backup/11-read-data | ++ local pod=mysql-client logger.go:42: 06:18:44 | demand-backup/11-read-data | ++ set +o xtrace logger.go:42: 06:18:45 | demand-backup/11-read-data | mysql-clienttrue logger.go:42: 06:18:45 | demand-backup/11-read-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:45 | demand-backup/11-read-data | ++ sed -e 's/mysql: //' logger.go:42: 06:18:45 | demand-backup/11-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:18:46 | demand-backup/11-read-data | + data=100500 logger.go:42: 06:18:46 | demand-backup/11-read-data | + kubectl create configmap -n kuttl-test-safe-ladybug 06-read-data-minio-1 --from-literal=data=100500 logger.go:42: 06:18:46 | demand-backup/11-read-data | configmap/06-read-data-minio-1 created logger.go:42: 06:18:46 | demand-backup/11-read-data | + for i in 0 1 2 logger.go:42: 06:18:46 | demand-backup/11-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:46 | demand-backup/11-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:18:46 | demand-backup/11-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:46 | demand-backup/11-read-data | ++ local pod= logger.go:42: 06:18:46 | demand-backup/11-read-data | +++ get_client_pod logger.go:42: 06:18:46 | demand-backup/11-read-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:18:46 | demand-backup/11-read-data | ++ client_pod=mysql-client logger.go:42: 06:18:46 | demand-backup/11-read-data | ++ wait_pod mysql-client logger.go:42: 06:18:46 | demand-backup/11-read-data | ++ local pod=mysql-client logger.go:42: 06:18:46 | demand-backup/11-read-data | ++ set +o xtrace logger.go:42: 06:18:47 | demand-backup/11-read-data | mysql-clienttrue logger.go:42: 06:18:47 | demand-backup/11-read-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:47 | demand-backup/11-read-data | ++ sed -e 's/mysql: //' logger.go:42: 06:18:47 | demand-backup/11-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:18:48 | demand-backup/11-read-data | + data=100500 logger.go:42: 06:18:48 | demand-backup/11-read-data | + kubectl create configmap -n kuttl-test-safe-ladybug 06-read-data-minio-2 --from-literal=data=100500 logger.go:42: 06:18:48 | demand-backup/11-read-data | configmap/06-read-data-minio-2 created logger.go:42: 06:18:49 | demand-backup/11-read-data | test step completed 11-read-data logger.go:42: 06:18:49 | demand-backup/12-delete-data | starting test step 12-delete-data logger.go:42: 06:18:49 | demand-backup/12-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions password=$(get_user_pass root) run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -p'$password'" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -p'$password'") kubectl create configmap -n "${NAMESPACE}" 04-delete-data-minio-backup-source-${i} --from-literal=data="${data}" done] logger.go:42: 06:18:49 | demand-backup/12-delete-data | + source ../../functions logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ realpath ../../.. logger.go:42: 06:18:49 | demand-backup/12-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:18:49 | demand-backup/12-delete-data | ++++ pwd logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:18:49 | demand-backup/12-delete-data | ++ test_name=demand-backup logger.go:42: 06:18:49 | demand-backup/12-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:18:49 | demand-backup/12-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ MYSQL_VERSION=8.0 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export MINIO_VER=5.4.0 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ MINIO_VER=5.4.0 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ export VAULT_VER=0.16.1 logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ VAULT_VER=0.16.1 logger.go:42: 06:18:49 | demand-backup/12-delete-data | ++++ which gdate logger.go:42: 06:18:49 | demand-backup/12-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:18:49 | demand-backup/12-delete-data | ++++ which date logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ date=/usr/sbin/date logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ oc get projects logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ : logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ kubectl get nodes logger.go:42: 06:18:49 | demand-backup/12-delete-data | +++ grep '^minikube' logger.go:42: 06:18:50 | demand-backup/12-delete-data | ++ oc get projects logger.go:42: 06:18:50 | demand-backup/12-delete-data | +++ kubectl version -o json logger.go:42: 06:18:50 | demand-backup/12-delete-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:18:50 | demand-backup/12-delete-data | +++ grep '\-eks\-' logger.go:42: 06:18:50 | demand-backup/12-delete-data | grep: warning: stray \ before - logger.go:42: 06:18:50 | demand-backup/12-delete-data | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:18:50 | demand-backup/12-delete-data | ++ '[' ']' logger.go:42: 06:18:50 | demand-backup/12-delete-data | ++ EKS=0 logger.go:42: 06:18:50 | demand-backup/12-delete-data | ++ get_user_pass root logger.go:42: 06:18:50 | demand-backup/12-delete-data | ++ local user=root logger.go:42: 06:18:50 | demand-backup/12-delete-data | ++ kubectl -n kuttl-test-safe-ladybug get secret test-secrets -o 'jsonpath={.data.root}' logger.go:42: 06:18:50 | demand-backup/12-delete-data | ++ base64 --decode logger.go:42: 06:18:51 | demand-backup/12-delete-data | + password='w(nT7<_+scfBV.#>Ox5' logger.go:42: 06:18:51 | demand-backup/12-delete-data | +++ get_cluster_name logger.go:42: 06:18:51 | demand-backup/12-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:18:51 | demand-backup/12-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 06:18:51 | demand-backup/12-delete-data | ++ local cluster=demand-backup logger.go:42: 06:18:51 | demand-backup/12-delete-data | ++ echo demand-backup-haproxy logger.go:42: 06:18:51 | demand-backup/12-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:51 | demand-backup/12-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 06:18:51 | demand-backup/12-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:51 | demand-backup/12-delete-data | + local pod= logger.go:42: 06:18:51 | demand-backup/12-delete-data | ++ get_client_pod logger.go:42: 06:18:51 | demand-backup/12-delete-data | ++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:18:52 | demand-backup/12-delete-data | + client_pod=mysql-client logger.go:42: 06:18:52 | demand-backup/12-delete-data | + wait_pod mysql-client logger.go:42: 06:18:52 | demand-backup/12-delete-data | + local pod=mysql-client logger.go:42: 06:18:52 | demand-backup/12-delete-data | + set +o xtrace logger.go:42: 06:18:52 | demand-backup/12-delete-data | mysql-clienttrue logger.go:42: 06:18:52 | demand-backup/12-delete-data | + kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:52 | demand-backup/12-delete-data | + sed -e 's/mysql: //' logger.go:42: 06:18:52 | demand-backup/12-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:18:53 | demand-backup/12-delete-data | + : logger.go:42: 06:18:53 | demand-backup/12-delete-data | ++ get_cluster_name logger.go:42: 06:18:53 | demand-backup/12-delete-data | ++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:18:53 | demand-backup/12-delete-data | + cluster_name=demand-backup logger.go:42: 06:18:53 | demand-backup/12-delete-data | + for i in 0 1 2 logger.go:42: 06:18:53 | demand-backup/12-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:53 | demand-backup/12-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:18:53 | demand-backup/12-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:53 | demand-backup/12-delete-data | ++ local pod= logger.go:42: 06:18:53 | demand-backup/12-delete-data | +++ get_client_pod logger.go:42: 06:18:53 | demand-backup/12-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:18:54 | demand-backup/12-delete-data | ++ client_pod=mysql-client logger.go:42: 06:18:54 | demand-backup/12-delete-data | ++ wait_pod mysql-client logger.go:42: 06:18:54 | demand-backup/12-delete-data | ++ local pod=mysql-client logger.go:42: 06:18:54 | demand-backup/12-delete-data | ++ set +o xtrace logger.go:42: 06:18:54 | demand-backup/12-delete-data | mysql-clienttrue logger.go:42: 06:18:54 | demand-backup/12-delete-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:54 | demand-backup/12-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 06:18:54 | demand-backup/12-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:18:55 | demand-backup/12-delete-data | ++ : logger.go:42: 06:18:55 | demand-backup/12-delete-data | + data= logger.go:42: 06:18:55 | demand-backup/12-delete-data | + kubectl create configmap -n kuttl-test-safe-ladybug 04-delete-data-minio-backup-source-0 --from-literal=data= logger.go:42: 06:18:56 | demand-backup/12-delete-data | configmap/04-delete-data-minio-backup-source-0 created logger.go:42: 06:18:56 | demand-backup/12-delete-data | + for i in 0 1 2 logger.go:42: 06:18:56 | demand-backup/12-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:56 | demand-backup/12-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:18:56 | demand-backup/12-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:56 | demand-backup/12-delete-data | ++ local pod= logger.go:42: 06:18:56 | demand-backup/12-delete-data | +++ get_client_pod logger.go:42: 06:18:56 | demand-backup/12-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:18:56 | demand-backup/12-delete-data | ++ client_pod=mysql-client logger.go:42: 06:18:56 | demand-backup/12-delete-data | ++ wait_pod mysql-client logger.go:42: 06:18:56 | demand-backup/12-delete-data | ++ local pod=mysql-client logger.go:42: 06:18:56 | demand-backup/12-delete-data | ++ set +o xtrace logger.go:42: 06:18:57 | demand-backup/12-delete-data | mysql-clienttrue logger.go:42: 06:18:57 | demand-backup/12-delete-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:57 | demand-backup/12-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 06:18:57 | demand-backup/12-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:18:57 | demand-backup/12-delete-data | ++ : logger.go:42: 06:18:57 | demand-backup/12-delete-data | + data= logger.go:42: 06:18:57 | demand-backup/12-delete-data | + kubectl create configmap -n kuttl-test-safe-ladybug 04-delete-data-minio-backup-source-1 --from-literal=data= logger.go:42: 06:18:58 | demand-backup/12-delete-data | configmap/04-delete-data-minio-backup-source-1 created logger.go:42: 06:18:58 | demand-backup/12-delete-data | + for i in 0 1 2 logger.go:42: 06:18:58 | demand-backup/12-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:58 | demand-backup/12-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:18:58 | demand-backup/12-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:58 | demand-backup/12-delete-data | ++ local pod= logger.go:42: 06:18:58 | demand-backup/12-delete-data | +++ get_client_pod logger.go:42: 06:18:58 | demand-backup/12-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:18:58 | demand-backup/12-delete-data | ++ client_pod=mysql-client logger.go:42: 06:18:58 | demand-backup/12-delete-data | ++ wait_pod mysql-client logger.go:42: 06:18:58 | demand-backup/12-delete-data | ++ local pod=mysql-client logger.go:42: 06:18:58 | demand-backup/12-delete-data | ++ set +o xtrace logger.go:42: 06:18:59 | demand-backup/12-delete-data | mysql-clienttrue logger.go:42: 06:18:59 | demand-backup/12-delete-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:18:59 | demand-backup/12-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 06:18:59 | demand-backup/12-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:19:00 | demand-backup/12-delete-data | ++ : logger.go:42: 06:19:00 | demand-backup/12-delete-data | + data= logger.go:42: 06:19:00 | demand-backup/12-delete-data | + kubectl create configmap -n kuttl-test-safe-ladybug 04-delete-data-minio-backup-source-2 --from-literal=data= logger.go:42: 06:19:00 | demand-backup/12-delete-data | configmap/04-delete-data-minio-backup-source-2 created logger.go:42: 06:19:01 | demand-backup/12-delete-data | test step completed 12-delete-data logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | starting test step 13-restore-from-minio-backup-source logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | running command: [sh -c set -o errexit set -o xtrace source ../../functions storage_name="minio" backup_name="demand-backup-minio" restore_name="demand-backup-restore-minio-backup-source" cluster_name="${test_name}${name_suffix:+-$name_suffix}" destination=$(kubectl -n "${NAMESPACE}" get ps-backup "${backup_name}" -o jsonpath='{.status.destination}') cat "${DEPLOY_DIR}/restore.yaml" \ | yq eval "$(printf '.metadata.name="%s"' "${restore_name}")" - \ | yq eval "$(printf '.spec.clusterName="%s"' "${cluster_name}")" - \ | yq eval "del(.spec.backupName)" - \ | yq eval "$(printf '.spec.backupSource.destination="%s"' "${destination}")" - \ | yq eval '.spec.backupSource.storage.type="s3"' - \ | yq eval '.spec.backupSource.storage.s3.bucket="operator-testing"' - \ | yq eval '.spec.backupSource.storage.s3.credentialsSecret="minio-secret"' - \ | yq eval "$(printf '.spec.backupSource.storage.s3.endpointUrl="http://minio-service.%s:9000"' "${NAMESPACE}")" - \ | yq eval '.spec.backupSource.storage.s3.region="us-east-1"' - \ | kubectl apply -n "${NAMESPACE}" -f -] logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | + source ../../functions logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ realpath ../../.. logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | ++++ pwd logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | ++ test_name=demand-backup logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ MYSQL_VERSION=8.0 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export MINIO_VER=5.4.0 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ MINIO_VER=5.4.0 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ export VAULT_VER=0.16.1 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ VAULT_VER=0.16.1 logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | ++++ which gdate logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | ++++ which date logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ date=/usr/sbin/date logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ oc get projects logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ : logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ kubectl get nodes logger.go:42: 06:19:01 | demand-backup/13-restore-from-minio-backup-source | +++ grep '^minikube' logger.go:42: 06:19:02 | demand-backup/13-restore-from-minio-backup-source | ++ oc get projects logger.go:42: 06:19:02 | demand-backup/13-restore-from-minio-backup-source | +++ kubectl version -o json logger.go:42: 06:19:02 | demand-backup/13-restore-from-minio-backup-source | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:19:02 | demand-backup/13-restore-from-minio-backup-source | +++ grep '\-eks\-' logger.go:42: 06:19:02 | demand-backup/13-restore-from-minio-backup-source | grep: warning: stray \ before - logger.go:42: 06:19:02 | demand-backup/13-restore-from-minio-backup-source | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:19:02 | demand-backup/13-restore-from-minio-backup-source | ++ '[' ']' logger.go:42: 06:19:02 | demand-backup/13-restore-from-minio-backup-source | ++ EKS=0 logger.go:42: 06:19:02 | demand-backup/13-restore-from-minio-backup-source | + storage_name=minio logger.go:42: 06:19:02 | demand-backup/13-restore-from-minio-backup-source | + backup_name=demand-backup-minio logger.go:42: 06:19:02 | demand-backup/13-restore-from-minio-backup-source | + restore_name=demand-backup-restore-minio-backup-source logger.go:42: 06:19:02 | demand-backup/13-restore-from-minio-backup-source | + cluster_name=demand-backup logger.go:42: 06:19:02 | demand-backup/13-restore-from-minio-backup-source | ++ kubectl -n kuttl-test-safe-ladybug get ps-backup demand-backup-minio -o 'jsonpath={.status.destination}' logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | + destination=s3://operator-testing/demand-backup-2025-09-11-06:13:58-full logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | + cat /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy/restore.yaml logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | + yq eval 'del(.spec.backupName)' - logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.type="s3"' - logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.bucket="operator-testing"' - logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | ++ printf '.spec.clusterName="%s"' demand-backup logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | ++ printf '.metadata.name="%s"' demand-backup-restore-minio-backup-source logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | ++ printf '.spec.backupSource.destination="%s"' s3://operator-testing/demand-backup-2025-09-11-06:13:58-full logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | + yq eval '.metadata.name="demand-backup-restore-minio-backup-source"' - logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | + yq eval '.spec.clusterName="demand-backup"' - logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | + kubectl apply -n kuttl-test-safe-ladybug -f - logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.credentialsSecret="minio-secret"' - logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | + yq eval '.spec.backupSource.destination="s3://operator-testing/demand-backup-2025-09-11-06:13:58-full"' - logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.region="us-east-1"' - logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | ++ printf '.spec.backupSource.storage.s3.endpointUrl="http://minio-service.%s:9000"' kuttl-test-safe-ladybug logger.go:42: 06:19:03 | demand-backup/13-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.endpointUrl="http://minio-service.kuttl-test-safe-ladybug:9000"' - logger.go:42: 06:19:04 | demand-backup/13-restore-from-minio-backup-source | perconaservermysqlrestore.ps.percona.com/demand-backup-restore-minio-backup-source created logger.go:42: 06:23:19 | demand-backup/13-restore-from-minio-backup-source | test step completed 13-restore-from-minio-backup-source logger.go:42: 06:23:19 | demand-backup/14-read-data | starting test step 14-read-data logger.go:42: 06:23:19 | demand-backup/14-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions password=$(get_user_pass root) cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -p'$password'") kubectl create configmap -n "${NAMESPACE}" 09-read-data-minio-backup-source-${i} --from-literal=data="${data}" done] logger.go:42: 06:23:19 | demand-backup/14-read-data | + source ../../functions logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ realpath ../../.. logger.go:42: 06:23:19 | demand-backup/14-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:23:19 | demand-backup/14-read-data | ++++ pwd logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:23:19 | demand-backup/14-read-data | ++ test_name=demand-backup logger.go:42: 06:23:19 | demand-backup/14-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:23:19 | demand-backup/14-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ MYSQL_VERSION=8.0 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export MINIO_VER=5.4.0 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ MINIO_VER=5.4.0 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ export VAULT_VER=0.16.1 logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ VAULT_VER=0.16.1 logger.go:42: 06:23:19 | demand-backup/14-read-data | ++++ which gdate logger.go:42: 06:23:19 | demand-backup/14-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:23:19 | demand-backup/14-read-data | ++++ which date logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ date=/usr/sbin/date logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ oc get projects logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ : logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ kubectl get nodes logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ grep '^minikube' logger.go:42: 06:23:19 | demand-backup/14-read-data | ++ oc get projects logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ kubectl version -o json logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:23:19 | demand-backup/14-read-data | +++ grep '\-eks\-' logger.go:42: 06:23:19 | demand-backup/14-read-data | grep: warning: stray \ before - logger.go:42: 06:23:20 | demand-backup/14-read-data | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:23:20 | demand-backup/14-read-data | ++ '[' ']' logger.go:42: 06:23:20 | demand-backup/14-read-data | ++ EKS=0 logger.go:42: 06:23:20 | demand-backup/14-read-data | ++ get_user_pass root logger.go:42: 06:23:20 | demand-backup/14-read-data | ++ local user=root logger.go:42: 06:23:20 | demand-backup/14-read-data | ++ kubectl -n kuttl-test-safe-ladybug get secret test-secrets -o 'jsonpath={.data.root}' logger.go:42: 06:23:20 | demand-backup/14-read-data | ++ base64 --decode logger.go:42: 06:23:20 | demand-backup/14-read-data | + password='w(nT7<_+scfBV.#>Ox5' logger.go:42: 06:23:20 | demand-backup/14-read-data | ++ get_cluster_name logger.go:42: 06:23:20 | demand-backup/14-read-data | ++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:23:21 | demand-backup/14-read-data | + cluster_name=demand-backup logger.go:42: 06:23:21 | demand-backup/14-read-data | + for i in 0 1 2 logger.go:42: 06:23:21 | demand-backup/14-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:21 | demand-backup/14-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:23:21 | demand-backup/14-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:21 | demand-backup/14-read-data | ++ local pod= logger.go:42: 06:23:21 | demand-backup/14-read-data | +++ get_client_pod logger.go:42: 06:23:21 | demand-backup/14-read-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:23:21 | demand-backup/14-read-data | ++ client_pod=mysql-client logger.go:42: 06:23:21 | demand-backup/14-read-data | ++ wait_pod mysql-client logger.go:42: 06:23:21 | demand-backup/14-read-data | ++ local pod=mysql-client logger.go:42: 06:23:21 | demand-backup/14-read-data | ++ set +o xtrace logger.go:42: 06:23:22 | demand-backup/14-read-data | mysql-clienttrue logger.go:42: 06:23:22 | demand-backup/14-read-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:22 | demand-backup/14-read-data | ++ sed -e 's/mysql: //' logger.go:42: 06:23:22 | demand-backup/14-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:23:22 | demand-backup/14-read-data | + data=100500 logger.go:42: 06:23:22 | demand-backup/14-read-data | + kubectl create configmap -n kuttl-test-safe-ladybug 09-read-data-minio-backup-source-0 --from-literal=data=100500 logger.go:42: 06:23:23 | demand-backup/14-read-data | configmap/09-read-data-minio-backup-source-0 created logger.go:42: 06:23:23 | demand-backup/14-read-data | + for i in 0 1 2 logger.go:42: 06:23:23 | demand-backup/14-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:23 | demand-backup/14-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:23:23 | demand-backup/14-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:23 | demand-backup/14-read-data | ++ local pod= logger.go:42: 06:23:23 | demand-backup/14-read-data | +++ get_client_pod logger.go:42: 06:23:23 | demand-backup/14-read-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:23:23 | demand-backup/14-read-data | ++ client_pod=mysql-client logger.go:42: 06:23:23 | demand-backup/14-read-data | ++ wait_pod mysql-client logger.go:42: 06:23:23 | demand-backup/14-read-data | ++ local pod=mysql-client logger.go:42: 06:23:23 | demand-backup/14-read-data | ++ set +o xtrace logger.go:42: 06:23:24 | demand-backup/14-read-data | mysql-clienttrue logger.go:42: 06:23:24 | demand-backup/14-read-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:24 | demand-backup/14-read-data | ++ sed -e 's/mysql: //' logger.go:42: 06:23:24 | demand-backup/14-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:23:25 | demand-backup/14-read-data | + data=100500 logger.go:42: 06:23:25 | demand-backup/14-read-data | + kubectl create configmap -n kuttl-test-safe-ladybug 09-read-data-minio-backup-source-1 --from-literal=data=100500 logger.go:42: 06:23:25 | demand-backup/14-read-data | configmap/09-read-data-minio-backup-source-1 created logger.go:42: 06:23:25 | demand-backup/14-read-data | + for i in 0 1 2 logger.go:42: 06:23:25 | demand-backup/14-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:25 | demand-backup/14-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:23:25 | demand-backup/14-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:25 | demand-backup/14-read-data | ++ local pod= logger.go:42: 06:23:25 | demand-backup/14-read-data | +++ get_client_pod logger.go:42: 06:23:25 | demand-backup/14-read-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:23:26 | demand-backup/14-read-data | ++ client_pod=mysql-client logger.go:42: 06:23:26 | demand-backup/14-read-data | ++ wait_pod mysql-client logger.go:42: 06:23:26 | demand-backup/14-read-data | ++ local pod=mysql-client logger.go:42: 06:23:26 | demand-backup/14-read-data | ++ set +o xtrace logger.go:42: 06:23:26 | demand-backup/14-read-data | mysql-clienttrue logger.go:42: 06:23:26 | demand-backup/14-read-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:26 | demand-backup/14-read-data | ++ sed -e 's/mysql: //' logger.go:42: 06:23:26 | demand-backup/14-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:23:27 | demand-backup/14-read-data | + data=100500 logger.go:42: 06:23:27 | demand-backup/14-read-data | + kubectl create configmap -n kuttl-test-safe-ladybug 09-read-data-minio-backup-source-2 --from-literal=data=100500 logger.go:42: 06:23:27 | demand-backup/14-read-data | configmap/09-read-data-minio-backup-source-2 created logger.go:42: 06:23:28 | demand-backup/14-read-data | test step completed 14-read-data logger.go:42: 06:23:28 | demand-backup/15-create-backup-s3 | starting test step 15-create-backup-s3 logger.go:42: 06:23:29 | demand-backup/15-create-backup-s3 | PerconaServerMySQLBackup:kuttl-test-safe-ladybug/demand-backup-s3 created logger.go:42: 06:23:42 | demand-backup/15-create-backup-s3 | test step completed 15-create-backup-s3 logger.go:42: 06:23:42 | demand-backup/16-delete-data | starting test step 16-delete-data logger.go:42: 06:23:42 | demand-backup/16-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions password=$(get_user_pass root) run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -p'$password'" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -p'$password'") kubectl create configmap -n "${NAMESPACE}" 08-delete-data-s3-${i} --from-literal=data="${data}" done] logger.go:42: 06:23:42 | demand-backup/16-delete-data | + source ../../functions logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ realpath ../../.. logger.go:42: 06:23:42 | demand-backup/16-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:23:42 | demand-backup/16-delete-data | ++++ pwd logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:23:42 | demand-backup/16-delete-data | ++ test_name=demand-backup logger.go:42: 06:23:42 | demand-backup/16-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:23:42 | demand-backup/16-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ MYSQL_VERSION=8.0 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export MINIO_VER=5.4.0 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ MINIO_VER=5.4.0 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ export VAULT_VER=0.16.1 logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ VAULT_VER=0.16.1 logger.go:42: 06:23:42 | demand-backup/16-delete-data | ++++ which gdate logger.go:42: 06:23:42 | demand-backup/16-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:23:42 | demand-backup/16-delete-data | ++++ which date logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ date=/usr/sbin/date logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ oc get projects logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ : logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ kubectl get nodes logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ grep '^minikube' logger.go:42: 06:23:42 | demand-backup/16-delete-data | ++ oc get projects logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ kubectl version -o json logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:23:42 | demand-backup/16-delete-data | +++ grep '\-eks\-' logger.go:42: 06:23:42 | demand-backup/16-delete-data | grep: warning: stray \ before - logger.go:42: 06:23:43 | demand-backup/16-delete-data | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:23:43 | demand-backup/16-delete-data | ++ '[' ']' logger.go:42: 06:23:43 | demand-backup/16-delete-data | ++ EKS=0 logger.go:42: 06:23:43 | demand-backup/16-delete-data | ++ get_user_pass root logger.go:42: 06:23:43 | demand-backup/16-delete-data | ++ local user=root logger.go:42: 06:23:43 | demand-backup/16-delete-data | ++ kubectl -n kuttl-test-safe-ladybug get secret test-secrets -o 'jsonpath={.data.root}' logger.go:42: 06:23:43 | demand-backup/16-delete-data | ++ base64 --decode logger.go:42: 06:23:43 | demand-backup/16-delete-data | + password='w(nT7<_+scfBV.#>Ox5' logger.go:42: 06:23:43 | demand-backup/16-delete-data | +++ get_cluster_name logger.go:42: 06:23:43 | demand-backup/16-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:23:44 | demand-backup/16-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 06:23:44 | demand-backup/16-delete-data | ++ local cluster=demand-backup logger.go:42: 06:23:44 | demand-backup/16-delete-data | ++ echo demand-backup-haproxy logger.go:42: 06:23:44 | demand-backup/16-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:44 | demand-backup/16-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 06:23:44 | demand-backup/16-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:44 | demand-backup/16-delete-data | + local pod= logger.go:42: 06:23:44 | demand-backup/16-delete-data | ++ get_client_pod logger.go:42: 06:23:44 | demand-backup/16-delete-data | ++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:23:44 | demand-backup/16-delete-data | + client_pod=mysql-client logger.go:42: 06:23:44 | demand-backup/16-delete-data | + wait_pod mysql-client logger.go:42: 06:23:44 | demand-backup/16-delete-data | + local pod=mysql-client logger.go:42: 06:23:44 | demand-backup/16-delete-data | + set +o xtrace logger.go:42: 06:23:45 | demand-backup/16-delete-data | mysql-clienttrue logger.go:42: 06:23:45 | demand-backup/16-delete-data | + kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:45 | demand-backup/16-delete-data | + sed -e 's/mysql: //' logger.go:42: 06:23:45 | demand-backup/16-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:23:46 | demand-backup/16-delete-data | + : logger.go:42: 06:23:46 | demand-backup/16-delete-data | ++ get_cluster_name logger.go:42: 06:23:46 | demand-backup/16-delete-data | ++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:23:46 | demand-backup/16-delete-data | + cluster_name=demand-backup logger.go:42: 06:23:46 | demand-backup/16-delete-data | + for i in 0 1 2 logger.go:42: 06:23:46 | demand-backup/16-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:46 | demand-backup/16-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:23:46 | demand-backup/16-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:46 | demand-backup/16-delete-data | ++ local pod= logger.go:42: 06:23:46 | demand-backup/16-delete-data | +++ get_client_pod logger.go:42: 06:23:46 | demand-backup/16-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:23:47 | demand-backup/16-delete-data | ++ client_pod=mysql-client logger.go:42: 06:23:47 | demand-backup/16-delete-data | ++ wait_pod mysql-client logger.go:42: 06:23:47 | demand-backup/16-delete-data | ++ local pod=mysql-client logger.go:42: 06:23:47 | demand-backup/16-delete-data | ++ set +o xtrace logger.go:42: 06:23:47 | demand-backup/16-delete-data | mysql-clienttrue logger.go:42: 06:23:47 | demand-backup/16-delete-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:47 | demand-backup/16-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 06:23:47 | demand-backup/16-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:23:48 | demand-backup/16-delete-data | ++ : logger.go:42: 06:23:48 | demand-backup/16-delete-data | + data= logger.go:42: 06:23:48 | demand-backup/16-delete-data | + kubectl create configmap -n kuttl-test-safe-ladybug 08-delete-data-s3-0 --from-literal=data= logger.go:42: 06:23:49 | demand-backup/16-delete-data | configmap/08-delete-data-s3-0 created logger.go:42: 06:23:49 | demand-backup/16-delete-data | + for i in 0 1 2 logger.go:42: 06:23:49 | demand-backup/16-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:49 | demand-backup/16-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:23:49 | demand-backup/16-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:49 | demand-backup/16-delete-data | ++ local pod= logger.go:42: 06:23:49 | demand-backup/16-delete-data | +++ get_client_pod logger.go:42: 06:23:49 | demand-backup/16-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:23:49 | demand-backup/16-delete-data | ++ client_pod=mysql-client logger.go:42: 06:23:49 | demand-backup/16-delete-data | ++ wait_pod mysql-client logger.go:42: 06:23:49 | demand-backup/16-delete-data | ++ local pod=mysql-client logger.go:42: 06:23:49 | demand-backup/16-delete-data | ++ set +o xtrace logger.go:42: 06:23:49 | demand-backup/16-delete-data | mysql-clienttrue logger.go:42: 06:23:49 | demand-backup/16-delete-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:49 | demand-backup/16-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 06:23:49 | demand-backup/16-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:23:50 | demand-backup/16-delete-data | ++ : logger.go:42: 06:23:50 | demand-backup/16-delete-data | + data= logger.go:42: 06:23:50 | demand-backup/16-delete-data | + kubectl create configmap -n kuttl-test-safe-ladybug 08-delete-data-s3-1 --from-literal=data= logger.go:42: 06:23:51 | demand-backup/16-delete-data | configmap/08-delete-data-s3-1 created logger.go:42: 06:23:51 | demand-backup/16-delete-data | + for i in 0 1 2 logger.go:42: 06:23:51 | demand-backup/16-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:51 | demand-backup/16-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:23:51 | demand-backup/16-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:51 | demand-backup/16-delete-data | ++ local pod= logger.go:42: 06:23:51 | demand-backup/16-delete-data | +++ get_client_pod logger.go:42: 06:23:51 | demand-backup/16-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:23:51 | demand-backup/16-delete-data | ++ client_pod=mysql-client logger.go:42: 06:23:51 | demand-backup/16-delete-data | ++ wait_pod mysql-client logger.go:42: 06:23:51 | demand-backup/16-delete-data | ++ local pod=mysql-client logger.go:42: 06:23:51 | demand-backup/16-delete-data | ++ set +o xtrace logger.go:42: 06:23:52 | demand-backup/16-delete-data | mysql-clienttrue logger.go:42: 06:23:52 | demand-backup/16-delete-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:23:52 | demand-backup/16-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 06:23:52 | demand-backup/16-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:23:53 | demand-backup/16-delete-data | ++ : logger.go:42: 06:23:53 | demand-backup/16-delete-data | + data= logger.go:42: 06:23:53 | demand-backup/16-delete-data | + kubectl create configmap -n kuttl-test-safe-ladybug 08-delete-data-s3-2 --from-literal=data= logger.go:42: 06:23:53 | demand-backup/16-delete-data | configmap/08-delete-data-s3-2 created logger.go:42: 06:23:54 | demand-backup/16-delete-data | test step completed 16-delete-data logger.go:42: 06:23:54 | demand-backup/17-restore-from-s3 | starting test step 17-restore-from-s3 logger.go:42: 06:23:55 | demand-backup/17-restore-from-s3 | PerconaServerMySQLRestore:kuttl-test-safe-ladybug/demand-backup-restore-s3 created logger.go:42: 06:28:07 | demand-backup/17-restore-from-s3 | test step completed 17-restore-from-s3 logger.go:42: 06:28:07 | demand-backup/18-read-data | starting test step 18-read-data logger.go:42: 06:28:07 | demand-backup/18-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions password=$(get_user_pass root) cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -p'$password'") kubectl create configmap -n "${NAMESPACE}" 06-read-data-s3-${i} --from-literal=data="${data}" done] logger.go:42: 06:28:07 | demand-backup/18-read-data | + source ../../functions logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ realpath ../../.. logger.go:42: 06:28:07 | demand-backup/18-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:28:07 | demand-backup/18-read-data | ++++ pwd logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:28:07 | demand-backup/18-read-data | ++ test_name=demand-backup logger.go:42: 06:28:07 | demand-backup/18-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:28:07 | demand-backup/18-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ MYSQL_VERSION=8.0 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export MINIO_VER=5.4.0 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ MINIO_VER=5.4.0 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ export VAULT_VER=0.16.1 logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ VAULT_VER=0.16.1 logger.go:42: 06:28:07 | demand-backup/18-read-data | ++++ which gdate logger.go:42: 06:28:07 | demand-backup/18-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:28:07 | demand-backup/18-read-data | ++++ which date logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ date=/usr/sbin/date logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ oc get projects logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ : logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ kubectl get nodes logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ grep '^minikube' logger.go:42: 06:28:07 | demand-backup/18-read-data | ++ oc get projects logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ kubectl version -o json logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ grep '\-eks\-' logger.go:42: 06:28:07 | demand-backup/18-read-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:28:07 | demand-backup/18-read-data | grep: warning: stray \ before - logger.go:42: 06:28:08 | demand-backup/18-read-data | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:28:08 | demand-backup/18-read-data | ++ '[' ']' logger.go:42: 06:28:08 | demand-backup/18-read-data | ++ EKS=0 logger.go:42: 06:28:08 | demand-backup/18-read-data | ++ get_user_pass root logger.go:42: 06:28:08 | demand-backup/18-read-data | ++ local user=root logger.go:42: 06:28:08 | demand-backup/18-read-data | ++ kubectl -n kuttl-test-safe-ladybug get secret test-secrets -o 'jsonpath={.data.root}' logger.go:42: 06:28:08 | demand-backup/18-read-data | ++ base64 --decode logger.go:42: 06:28:09 | demand-backup/18-read-data | + password='w(nT7<_+scfBV.#>Ox5' logger.go:42: 06:28:09 | demand-backup/18-read-data | ++ get_cluster_name logger.go:42: 06:28:09 | demand-backup/18-read-data | ++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:28:09 | demand-backup/18-read-data | + cluster_name=demand-backup logger.go:42: 06:28:09 | demand-backup/18-read-data | + for i in 0 1 2 logger.go:42: 06:28:09 | demand-backup/18-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:09 | demand-backup/18-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:28:09 | demand-backup/18-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:09 | demand-backup/18-read-data | ++ local pod= logger.go:42: 06:28:09 | demand-backup/18-read-data | +++ get_client_pod logger.go:42: 06:28:09 | demand-backup/18-read-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:28:10 | demand-backup/18-read-data | ++ client_pod=mysql-client logger.go:42: 06:28:10 | demand-backup/18-read-data | ++ wait_pod mysql-client logger.go:42: 06:28:10 | demand-backup/18-read-data | ++ local pod=mysql-client logger.go:42: 06:28:10 | demand-backup/18-read-data | ++ set +o xtrace logger.go:42: 06:28:10 | demand-backup/18-read-data | mysql-clienttrue logger.go:42: 06:28:10 | demand-backup/18-read-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:10 | demand-backup/18-read-data | ++ sed -e 's/mysql: //' logger.go:42: 06:28:10 | demand-backup/18-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:28:11 | demand-backup/18-read-data | + data=100500 logger.go:42: 06:28:11 | demand-backup/18-read-data | + kubectl create configmap -n kuttl-test-safe-ladybug 06-read-data-s3-0 --from-literal=data=100500 logger.go:42: 06:28:11 | demand-backup/18-read-data | configmap/06-read-data-s3-0 created logger.go:42: 06:28:11 | demand-backup/18-read-data | + for i in 0 1 2 logger.go:42: 06:28:11 | demand-backup/18-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:11 | demand-backup/18-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:28:11 | demand-backup/18-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:11 | demand-backup/18-read-data | ++ local pod= logger.go:42: 06:28:11 | demand-backup/18-read-data | +++ get_client_pod logger.go:42: 06:28:11 | demand-backup/18-read-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:28:12 | demand-backup/18-read-data | ++ client_pod=mysql-client logger.go:42: 06:28:12 | demand-backup/18-read-data | ++ wait_pod mysql-client logger.go:42: 06:28:12 | demand-backup/18-read-data | ++ local pod=mysql-client logger.go:42: 06:28:12 | demand-backup/18-read-data | ++ set +o xtrace logger.go:42: 06:28:12 | demand-backup/18-read-data | mysql-clienttrue logger.go:42: 06:28:12 | demand-backup/18-read-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:12 | demand-backup/18-read-data | ++ sed -e 's/mysql: //' logger.go:42: 06:28:12 | demand-backup/18-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:28:13 | demand-backup/18-read-data | + data=100500 logger.go:42: 06:28:13 | demand-backup/18-read-data | + kubectl create configmap -n kuttl-test-safe-ladybug 06-read-data-s3-1 --from-literal=data=100500 logger.go:42: 06:28:14 | demand-backup/18-read-data | configmap/06-read-data-s3-1 created logger.go:42: 06:28:14 | demand-backup/18-read-data | + for i in 0 1 2 logger.go:42: 06:28:14 | demand-backup/18-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:14 | demand-backup/18-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:28:14 | demand-backup/18-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:14 | demand-backup/18-read-data | ++ local pod= logger.go:42: 06:28:14 | demand-backup/18-read-data | +++ get_client_pod logger.go:42: 06:28:14 | demand-backup/18-read-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:28:14 | demand-backup/18-read-data | ++ client_pod=mysql-client logger.go:42: 06:28:14 | demand-backup/18-read-data | ++ wait_pod mysql-client logger.go:42: 06:28:14 | demand-backup/18-read-data | ++ local pod=mysql-client logger.go:42: 06:28:14 | demand-backup/18-read-data | ++ set +o xtrace logger.go:42: 06:28:15 | demand-backup/18-read-data | mysql-clienttrue logger.go:42: 06:28:15 | demand-backup/18-read-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:15 | demand-backup/18-read-data | ++ sed -e 's/mysql: //' logger.go:42: 06:28:15 | demand-backup/18-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:28:16 | demand-backup/18-read-data | + data=100500 logger.go:42: 06:28:16 | demand-backup/18-read-data | + kubectl create configmap -n kuttl-test-safe-ladybug 06-read-data-s3-2 --from-literal=data=100500 logger.go:42: 06:28:16 | demand-backup/18-read-data | configmap/06-read-data-s3-2 created logger.go:42: 06:28:17 | demand-backup/18-read-data | test step completed 18-read-data logger.go:42: 06:28:17 | demand-backup/19-create-backup-gcp | starting test step 19-create-backup-gcp logger.go:42: 06:28:18 | demand-backup/19-create-backup-gcp | PerconaServerMySQLBackup:kuttl-test-safe-ladybug/demand-backup-gcp created logger.go:42: 06:28:29 | demand-backup/19-create-backup-gcp | test step completed 19-create-backup-gcp logger.go:42: 06:28:29 | demand-backup/20-delete-data | starting test step 20-delete-data logger.go:42: 06:28:29 | demand-backup/20-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions password=$(get_user_pass root) run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -p'$password'" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -p'$password'") kubectl create configmap -n "${NAMESPACE}" 12-delete-data-gcp-${i} --from-literal=data="${data}" done] logger.go:42: 06:28:29 | demand-backup/20-delete-data | + source ../../functions logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ realpath ../../.. logger.go:42: 06:28:29 | demand-backup/20-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:28:29 | demand-backup/20-delete-data | ++++ pwd logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:28:29 | demand-backup/20-delete-data | ++ test_name=demand-backup logger.go:42: 06:28:29 | demand-backup/20-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:28:29 | demand-backup/20-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ MYSQL_VERSION=8.0 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export MINIO_VER=5.4.0 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ MINIO_VER=5.4.0 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ export VAULT_VER=0.16.1 logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ VAULT_VER=0.16.1 logger.go:42: 06:28:29 | demand-backup/20-delete-data | ++++ which gdate logger.go:42: 06:28:29 | demand-backup/20-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:28:29 | demand-backup/20-delete-data | ++++ which date logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ date=/usr/sbin/date logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ oc get projects logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ : logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ kubectl get nodes logger.go:42: 06:28:29 | demand-backup/20-delete-data | +++ grep '^minikube' logger.go:42: 06:28:30 | demand-backup/20-delete-data | ++ oc get projects logger.go:42: 06:28:30 | demand-backup/20-delete-data | +++ kubectl version -o json logger.go:42: 06:28:30 | demand-backup/20-delete-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:28:30 | demand-backup/20-delete-data | +++ grep '\-eks\-' logger.go:42: 06:28:30 | demand-backup/20-delete-data | grep: warning: stray \ before - logger.go:42: 06:28:30 | demand-backup/20-delete-data | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:28:30 | demand-backup/20-delete-data | ++ '[' ']' logger.go:42: 06:28:30 | demand-backup/20-delete-data | ++ EKS=0 logger.go:42: 06:28:30 | demand-backup/20-delete-data | ++ get_user_pass root logger.go:42: 06:28:30 | demand-backup/20-delete-data | ++ local user=root logger.go:42: 06:28:30 | demand-backup/20-delete-data | ++ kubectl -n kuttl-test-safe-ladybug get secret test-secrets -o 'jsonpath={.data.root}' logger.go:42: 06:28:30 | demand-backup/20-delete-data | ++ base64 --decode logger.go:42: 06:28:31 | demand-backup/20-delete-data | + password='w(nT7<_+scfBV.#>Ox5' logger.go:42: 06:28:31 | demand-backup/20-delete-data | +++ get_cluster_name logger.go:42: 06:28:31 | demand-backup/20-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:28:31 | demand-backup/20-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 06:28:31 | demand-backup/20-delete-data | ++ local cluster=demand-backup logger.go:42: 06:28:31 | demand-backup/20-delete-data | ++ echo demand-backup-haproxy logger.go:42: 06:28:31 | demand-backup/20-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:31 | demand-backup/20-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 06:28:31 | demand-backup/20-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:31 | demand-backup/20-delete-data | + local pod= logger.go:42: 06:28:31 | demand-backup/20-delete-data | ++ get_client_pod logger.go:42: 06:28:31 | demand-backup/20-delete-data | ++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:28:32 | demand-backup/20-delete-data | + client_pod=mysql-client logger.go:42: 06:28:32 | demand-backup/20-delete-data | + wait_pod mysql-client logger.go:42: 06:28:32 | demand-backup/20-delete-data | + local pod=mysql-client logger.go:42: 06:28:32 | demand-backup/20-delete-data | + set +o xtrace logger.go:42: 06:28:32 | demand-backup/20-delete-data | mysql-clienttrue logger.go:42: 06:28:32 | demand-backup/20-delete-data | + kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:32 | demand-backup/20-delete-data | + sed -e 's/mysql: //' logger.go:42: 06:28:32 | demand-backup/20-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:28:33 | demand-backup/20-delete-data | + : logger.go:42: 06:28:33 | demand-backup/20-delete-data | ++ get_cluster_name logger.go:42: 06:28:33 | demand-backup/20-delete-data | ++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:28:34 | demand-backup/20-delete-data | + cluster_name=demand-backup logger.go:42: 06:28:34 | demand-backup/20-delete-data | + for i in 0 1 2 logger.go:42: 06:28:34 | demand-backup/20-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:34 | demand-backup/20-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:28:34 | demand-backup/20-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:34 | demand-backup/20-delete-data | ++ local pod= logger.go:42: 06:28:34 | demand-backup/20-delete-data | +++ get_client_pod logger.go:42: 06:28:34 | demand-backup/20-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:28:34 | demand-backup/20-delete-data | ++ client_pod=mysql-client logger.go:42: 06:28:34 | demand-backup/20-delete-data | ++ wait_pod mysql-client logger.go:42: 06:28:34 | demand-backup/20-delete-data | ++ local pod=mysql-client logger.go:42: 06:28:34 | demand-backup/20-delete-data | ++ set +o xtrace logger.go:42: 06:28:35 | demand-backup/20-delete-data | mysql-clienttrue logger.go:42: 06:28:35 | demand-backup/20-delete-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:35 | demand-backup/20-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 06:28:35 | demand-backup/20-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:28:35 | demand-backup/20-delete-data | ++ : logger.go:42: 06:28:35 | demand-backup/20-delete-data | + data= logger.go:42: 06:28:35 | demand-backup/20-delete-data | + kubectl create configmap -n kuttl-test-safe-ladybug 12-delete-data-gcp-0 --from-literal=data= logger.go:42: 06:28:36 | demand-backup/20-delete-data | configmap/12-delete-data-gcp-0 created logger.go:42: 06:28:36 | demand-backup/20-delete-data | + for i in 0 1 2 logger.go:42: 06:28:36 | demand-backup/20-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:36 | demand-backup/20-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:28:36 | demand-backup/20-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:36 | demand-backup/20-delete-data | ++ local pod= logger.go:42: 06:28:36 | demand-backup/20-delete-data | +++ get_client_pod logger.go:42: 06:28:36 | demand-backup/20-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:28:36 | demand-backup/20-delete-data | ++ client_pod=mysql-client logger.go:42: 06:28:36 | demand-backup/20-delete-data | ++ wait_pod mysql-client logger.go:42: 06:28:36 | demand-backup/20-delete-data | ++ local pod=mysql-client logger.go:42: 06:28:36 | demand-backup/20-delete-data | ++ set +o xtrace logger.go:42: 06:28:37 | demand-backup/20-delete-data | mysql-clienttrue logger.go:42: 06:28:37 | demand-backup/20-delete-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:37 | demand-backup/20-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 06:28:37 | demand-backup/20-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:28:38 | demand-backup/20-delete-data | ++ : logger.go:42: 06:28:38 | demand-backup/20-delete-data | + data= logger.go:42: 06:28:38 | demand-backup/20-delete-data | + kubectl create configmap -n kuttl-test-safe-ladybug 12-delete-data-gcp-1 --from-literal=data= logger.go:42: 06:28:38 | demand-backup/20-delete-data | configmap/12-delete-data-gcp-1 created logger.go:42: 06:28:38 | demand-backup/20-delete-data | + for i in 0 1 2 logger.go:42: 06:28:38 | demand-backup/20-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:38 | demand-backup/20-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:28:38 | demand-backup/20-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:38 | demand-backup/20-delete-data | ++ local pod= logger.go:42: 06:28:38 | demand-backup/20-delete-data | +++ get_client_pod logger.go:42: 06:28:38 | demand-backup/20-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:28:39 | demand-backup/20-delete-data | ++ client_pod=mysql-client logger.go:42: 06:28:39 | demand-backup/20-delete-data | ++ wait_pod mysql-client logger.go:42: 06:28:39 | demand-backup/20-delete-data | ++ local pod=mysql-client logger.go:42: 06:28:39 | demand-backup/20-delete-data | ++ set +o xtrace logger.go:42: 06:28:39 | demand-backup/20-delete-data | mysql-clienttrue logger.go:42: 06:28:39 | demand-backup/20-delete-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:28:39 | demand-backup/20-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 06:28:39 | demand-backup/20-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:28:40 | demand-backup/20-delete-data | ++ : logger.go:42: 06:28:40 | demand-backup/20-delete-data | + data= logger.go:42: 06:28:40 | demand-backup/20-delete-data | + kubectl create configmap -n kuttl-test-safe-ladybug 12-delete-data-gcp-2 --from-literal=data= logger.go:42: 06:28:40 | demand-backup/20-delete-data | configmap/12-delete-data-gcp-2 created logger.go:42: 06:28:41 | demand-backup/20-delete-data | test step completed 20-delete-data logger.go:42: 06:28:41 | demand-backup/21-restore-from-gcp | starting test step 21-restore-from-gcp logger.go:42: 06:28:42 | demand-backup/21-restore-from-gcp | PerconaServerMySQLRestore:kuttl-test-safe-ladybug/demand-backup-restore-gcp created logger.go:42: 06:32:51 | demand-backup/21-restore-from-gcp | test step completed 21-restore-from-gcp logger.go:42: 06:32:51 | demand-backup/22-read-data | starting test step 22-read-data logger.go:42: 06:32:51 | demand-backup/22-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions password=$(get_user_pass root) cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -p'$password'") kubectl create configmap -n "${NAMESPACE}" 06-read-data-gcp-${i} --from-literal=data="${data}" done] logger.go:42: 06:32:51 | demand-backup/22-read-data | + source ../../functions logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ realpath ../../.. logger.go:42: 06:32:51 | demand-backup/22-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:32:51 | demand-backup/22-read-data | ++++ pwd logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:32:51 | demand-backup/22-read-data | ++ test_name=demand-backup logger.go:42: 06:32:51 | demand-backup/22-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:32:51 | demand-backup/22-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ MYSQL_VERSION=8.0 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export MINIO_VER=5.4.0 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ MINIO_VER=5.4.0 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ export VAULT_VER=0.16.1 logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ VAULT_VER=0.16.1 logger.go:42: 06:32:51 | demand-backup/22-read-data | ++++ which gdate logger.go:42: 06:32:51 | demand-backup/22-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:32:51 | demand-backup/22-read-data | ++++ which date logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ date=/usr/sbin/date logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ oc get projects logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ : logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ kubectl get nodes logger.go:42: 06:32:51 | demand-backup/22-read-data | +++ grep '^minikube' logger.go:42: 06:32:52 | demand-backup/22-read-data | ++ oc get projects logger.go:42: 06:32:52 | demand-backup/22-read-data | +++ kubectl version -o json logger.go:42: 06:32:52 | demand-backup/22-read-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:32:52 | demand-backup/22-read-data | +++ grep '\-eks\-' logger.go:42: 06:32:52 | demand-backup/22-read-data | grep: warning: stray \ before - logger.go:42: 06:32:52 | demand-backup/22-read-data | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:32:52 | demand-backup/22-read-data | ++ '[' ']' logger.go:42: 06:32:52 | demand-backup/22-read-data | ++ EKS=0 logger.go:42: 06:32:52 | demand-backup/22-read-data | ++ get_user_pass root logger.go:42: 06:32:52 | demand-backup/22-read-data | ++ local user=root logger.go:42: 06:32:52 | demand-backup/22-read-data | ++ base64 --decode logger.go:42: 06:32:52 | demand-backup/22-read-data | ++ kubectl -n kuttl-test-safe-ladybug get secret test-secrets -o 'jsonpath={.data.root}' logger.go:42: 06:32:53 | demand-backup/22-read-data | + password='w(nT7<_+scfBV.#>Ox5' logger.go:42: 06:32:53 | demand-backup/22-read-data | ++ get_cluster_name logger.go:42: 06:32:53 | demand-backup/22-read-data | ++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:32:53 | demand-backup/22-read-data | + cluster_name=demand-backup logger.go:42: 06:32:53 | demand-backup/22-read-data | + for i in 0 1 2 logger.go:42: 06:32:53 | demand-backup/22-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:32:53 | demand-backup/22-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:32:53 | demand-backup/22-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:32:53 | demand-backup/22-read-data | ++ local pod= logger.go:42: 06:32:53 | demand-backup/22-read-data | +++ get_client_pod logger.go:42: 06:32:53 | demand-backup/22-read-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:32:54 | demand-backup/22-read-data | ++ client_pod=mysql-client logger.go:42: 06:32:54 | demand-backup/22-read-data | ++ wait_pod mysql-client logger.go:42: 06:32:54 | demand-backup/22-read-data | ++ local pod=mysql-client logger.go:42: 06:32:54 | demand-backup/22-read-data | ++ set +o xtrace logger.go:42: 06:32:54 | demand-backup/22-read-data | mysql-clienttrue logger.go:42: 06:32:54 | demand-backup/22-read-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:32:54 | demand-backup/22-read-data | ++ sed -e 's/mysql: //' logger.go:42: 06:32:54 | demand-backup/22-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:32:55 | demand-backup/22-read-data | + data=100500 logger.go:42: 06:32:55 | demand-backup/22-read-data | + kubectl create configmap -n kuttl-test-safe-ladybug 06-read-data-gcp-0 --from-literal=data=100500 logger.go:42: 06:32:56 | demand-backup/22-read-data | configmap/06-read-data-gcp-0 created logger.go:42: 06:32:56 | demand-backup/22-read-data | + for i in 0 1 2 logger.go:42: 06:32:56 | demand-backup/22-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:32:56 | demand-backup/22-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:32:56 | demand-backup/22-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:32:56 | demand-backup/22-read-data | ++ local pod= logger.go:42: 06:32:56 | demand-backup/22-read-data | +++ get_client_pod logger.go:42: 06:32:56 | demand-backup/22-read-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:32:56 | demand-backup/22-read-data | ++ client_pod=mysql-client logger.go:42: 06:32:56 | demand-backup/22-read-data | ++ wait_pod mysql-client logger.go:42: 06:32:56 | demand-backup/22-read-data | ++ local pod=mysql-client logger.go:42: 06:32:56 | demand-backup/22-read-data | ++ set +o xtrace logger.go:42: 06:32:57 | demand-backup/22-read-data | mysql-clienttrue logger.go:42: 06:32:57 | demand-backup/22-read-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:32:57 | demand-backup/22-read-data | ++ sed -e 's/mysql: //' logger.go:42: 06:32:57 | demand-backup/22-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:32:58 | demand-backup/22-read-data | + data=100500 logger.go:42: 06:32:58 | demand-backup/22-read-data | + kubectl create configmap -n kuttl-test-safe-ladybug 06-read-data-gcp-1 --from-literal=data=100500 logger.go:42: 06:32:58 | demand-backup/22-read-data | configmap/06-read-data-gcp-1 created logger.go:42: 06:32:58 | demand-backup/22-read-data | + for i in 0 1 2 logger.go:42: 06:32:58 | demand-backup/22-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:32:58 | demand-backup/22-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:32:58 | demand-backup/22-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:32:58 | demand-backup/22-read-data | ++ local pod= logger.go:42: 06:32:58 | demand-backup/22-read-data | +++ get_client_pod logger.go:42: 06:32:58 | demand-backup/22-read-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:32:59 | demand-backup/22-read-data | ++ client_pod=mysql-client logger.go:42: 06:32:59 | demand-backup/22-read-data | ++ wait_pod mysql-client logger.go:42: 06:32:59 | demand-backup/22-read-data | ++ local pod=mysql-client logger.go:42: 06:32:59 | demand-backup/22-read-data | ++ set +o xtrace logger.go:42: 06:32:59 | demand-backup/22-read-data | mysql-clienttrue logger.go:42: 06:32:59 | demand-backup/22-read-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:32:59 | demand-backup/22-read-data | ++ sed -e 's/mysql: //' logger.go:42: 06:32:59 | demand-backup/22-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:33:00 | demand-backup/22-read-data | + data=100500 logger.go:42: 06:33:00 | demand-backup/22-read-data | + kubectl create configmap -n kuttl-test-safe-ladybug 06-read-data-gcp-2 --from-literal=data=100500 logger.go:42: 06:33:00 | demand-backup/22-read-data | configmap/06-read-data-gcp-2 created logger.go:42: 06:33:02 | demand-backup/22-read-data | test step completed 22-read-data logger.go:42: 06:33:02 | demand-backup/23-create-backup-azure | starting test step 23-create-backup-azure logger.go:42: 06:33:02 | demand-backup/23-create-backup-azure | PerconaServerMySQLBackup:kuttl-test-safe-ladybug/demand-backup-azure created logger.go:42: 06:33:14 | demand-backup/23-create-backup-azure | test step completed 23-create-backup-azure logger.go:42: 06:33:14 | demand-backup/24-delete-data | starting test step 24-delete-data logger.go:42: 06:33:14 | demand-backup/24-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions password=$(get_user_pass root) run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -p'$password'" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -p'$password'") kubectl create configmap -n "${NAMESPACE}" 16-delete-data-azure-${i} --from-literal=data="${data}" done] logger.go:42: 06:33:14 | demand-backup/24-delete-data | + source ../../functions logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ realpath ../../.. logger.go:42: 06:33:14 | demand-backup/24-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:33:14 | demand-backup/24-delete-data | ++++ pwd logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:33:14 | demand-backup/24-delete-data | ++ test_name=demand-backup logger.go:42: 06:33:14 | demand-backup/24-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:33:14 | demand-backup/24-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ MYSQL_VERSION=8.0 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export MINIO_VER=5.4.0 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ MINIO_VER=5.4.0 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ export VAULT_VER=0.16.1 logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ VAULT_VER=0.16.1 logger.go:42: 06:33:14 | demand-backup/24-delete-data | ++++ which gdate logger.go:42: 06:33:14 | demand-backup/24-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:33:14 | demand-backup/24-delete-data | ++++ which date logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ date=/usr/sbin/date logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ oc get projects logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ : logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ kubectl get nodes logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ grep '^minikube' logger.go:42: 06:33:14 | demand-backup/24-delete-data | ++ oc get projects logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ kubectl version -o json logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:33:14 | demand-backup/24-delete-data | +++ grep '\-eks\-' logger.go:42: 06:33:14 | demand-backup/24-delete-data | grep: warning: stray \ before - logger.go:42: 06:33:15 | demand-backup/24-delete-data | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:33:15 | demand-backup/24-delete-data | ++ '[' ']' logger.go:42: 06:33:15 | demand-backup/24-delete-data | ++ EKS=0 logger.go:42: 06:33:15 | demand-backup/24-delete-data | ++ get_user_pass root logger.go:42: 06:33:15 | demand-backup/24-delete-data | ++ local user=root logger.go:42: 06:33:15 | demand-backup/24-delete-data | ++ kubectl -n kuttl-test-safe-ladybug get secret test-secrets -o 'jsonpath={.data.root}' logger.go:42: 06:33:15 | demand-backup/24-delete-data | ++ base64 --decode logger.go:42: 06:33:15 | demand-backup/24-delete-data | + password='w(nT7<_+scfBV.#>Ox5' logger.go:42: 06:33:15 | demand-backup/24-delete-data | +++ get_cluster_name logger.go:42: 06:33:15 | demand-backup/24-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:33:16 | demand-backup/24-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 06:33:16 | demand-backup/24-delete-data | ++ local cluster=demand-backup logger.go:42: 06:33:16 | demand-backup/24-delete-data | ++ echo demand-backup-haproxy logger.go:42: 06:33:16 | demand-backup/24-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:33:16 | demand-backup/24-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 06:33:16 | demand-backup/24-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:33:16 | demand-backup/24-delete-data | + local pod= logger.go:42: 06:33:16 | demand-backup/24-delete-data | ++ get_client_pod logger.go:42: 06:33:16 | demand-backup/24-delete-data | ++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:33:16 | demand-backup/24-delete-data | + client_pod=mysql-client logger.go:42: 06:33:16 | demand-backup/24-delete-data | + wait_pod mysql-client logger.go:42: 06:33:16 | demand-backup/24-delete-data | + local pod=mysql-client logger.go:42: 06:33:16 | demand-backup/24-delete-data | + set +o xtrace logger.go:42: 06:33:17 | demand-backup/24-delete-data | mysql-clienttrue logger.go:42: 06:33:17 | demand-backup/24-delete-data | + kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:33:17 | demand-backup/24-delete-data | + sed -e 's/mysql: //' logger.go:42: 06:33:17 | demand-backup/24-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:33:18 | demand-backup/24-delete-data | + : logger.go:42: 06:33:18 | demand-backup/24-delete-data | ++ get_cluster_name logger.go:42: 06:33:18 | demand-backup/24-delete-data | ++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:33:18 | demand-backup/24-delete-data | + cluster_name=demand-backup logger.go:42: 06:33:18 | demand-backup/24-delete-data | + for i in 0 1 2 logger.go:42: 06:33:18 | demand-backup/24-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:33:18 | demand-backup/24-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:33:18 | demand-backup/24-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:33:18 | demand-backup/24-delete-data | ++ local pod= logger.go:42: 06:33:18 | demand-backup/24-delete-data | +++ get_client_pod logger.go:42: 06:33:18 | demand-backup/24-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:33:19 | demand-backup/24-delete-data | ++ client_pod=mysql-client logger.go:42: 06:33:19 | demand-backup/24-delete-data | ++ wait_pod mysql-client logger.go:42: 06:33:19 | demand-backup/24-delete-data | ++ local pod=mysql-client logger.go:42: 06:33:19 | demand-backup/24-delete-data | ++ set +o xtrace logger.go:42: 06:33:19 | demand-backup/24-delete-data | mysql-clienttrue logger.go:42: 06:33:19 | demand-backup/24-delete-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:33:19 | demand-backup/24-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 06:33:19 | demand-backup/24-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:33:20 | demand-backup/24-delete-data | ++ : logger.go:42: 06:33:20 | demand-backup/24-delete-data | + data= logger.go:42: 06:33:20 | demand-backup/24-delete-data | + kubectl create configmap -n kuttl-test-safe-ladybug 16-delete-data-azure-0 --from-literal=data= logger.go:42: 06:33:20 | demand-backup/24-delete-data | configmap/16-delete-data-azure-0 created logger.go:42: 06:33:20 | demand-backup/24-delete-data | + for i in 0 1 2 logger.go:42: 06:33:20 | demand-backup/24-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:33:20 | demand-backup/24-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:33:20 | demand-backup/24-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:33:20 | demand-backup/24-delete-data | ++ local pod= logger.go:42: 06:33:20 | demand-backup/24-delete-data | +++ get_client_pod logger.go:42: 06:33:20 | demand-backup/24-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:33:21 | demand-backup/24-delete-data | ++ client_pod=mysql-client logger.go:42: 06:33:21 | demand-backup/24-delete-data | ++ wait_pod mysql-client logger.go:42: 06:33:21 | demand-backup/24-delete-data | ++ local pod=mysql-client logger.go:42: 06:33:21 | demand-backup/24-delete-data | ++ set +o xtrace logger.go:42: 06:33:21 | demand-backup/24-delete-data | mysql-clienttrue logger.go:42: 06:33:21 | demand-backup/24-delete-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:33:21 | demand-backup/24-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 06:33:21 | demand-backup/24-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:33:22 | demand-backup/24-delete-data | ++ : logger.go:42: 06:33:22 | demand-backup/24-delete-data | + data= logger.go:42: 06:33:22 | demand-backup/24-delete-data | + kubectl create configmap -n kuttl-test-safe-ladybug 16-delete-data-azure-1 --from-literal=data= logger.go:42: 06:33:23 | demand-backup/24-delete-data | configmap/16-delete-data-azure-1 created logger.go:42: 06:33:23 | demand-backup/24-delete-data | + for i in 0 1 2 logger.go:42: 06:33:23 | demand-backup/24-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:33:23 | demand-backup/24-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:33:23 | demand-backup/24-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:33:23 | demand-backup/24-delete-data | ++ local pod= logger.go:42: 06:33:23 | demand-backup/24-delete-data | +++ get_client_pod logger.go:42: 06:33:23 | demand-backup/24-delete-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:33:23 | demand-backup/24-delete-data | ++ client_pod=mysql-client logger.go:42: 06:33:23 | demand-backup/24-delete-data | ++ wait_pod mysql-client logger.go:42: 06:33:23 | demand-backup/24-delete-data | ++ local pod=mysql-client logger.go:42: 06:33:23 | demand-backup/24-delete-data | ++ set +o xtrace logger.go:42: 06:33:24 | demand-backup/24-delete-data | mysql-clienttrue logger.go:42: 06:33:24 | demand-backup/24-delete-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:33:24 | demand-backup/24-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 06:33:24 | demand-backup/24-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:33:24 | demand-backup/24-delete-data | ++ : logger.go:42: 06:33:24 | demand-backup/24-delete-data | + data= logger.go:42: 06:33:24 | demand-backup/24-delete-data | + kubectl create configmap -n kuttl-test-safe-ladybug 16-delete-data-azure-2 --from-literal=data= logger.go:42: 06:33:25 | demand-backup/24-delete-data | configmap/16-delete-data-azure-2 created logger.go:42: 06:33:26 | demand-backup/24-delete-data | test step completed 24-delete-data logger.go:42: 06:33:26 | demand-backup/25-restore-from-azure | starting test step 25-restore-from-azure logger.go:42: 06:33:27 | demand-backup/25-restore-from-azure | PerconaServerMySQLRestore:kuttl-test-safe-ladybug/demand-backup-restore-azure created logger.go:42: 06:37:46 | demand-backup/25-restore-from-azure | test step completed 25-restore-from-azure logger.go:42: 06:37:46 | demand-backup/26-read-data | starting test step 26-read-data logger.go:42: 06:37:46 | demand-backup/26-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions password=$(get_user_pass root) cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -p'$password'") kubectl create configmap -n "${NAMESPACE}" 06-read-data-azure-${i} --from-literal=data="${data}" done] logger.go:42: 06:37:46 | demand-backup/26-read-data | + source ../../functions logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ realpath ../../.. logger.go:42: 06:37:46 | demand-backup/26-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:37:46 | demand-backup/26-read-data | ++++ pwd logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:37:46 | demand-backup/26-read-data | ++ test_name=demand-backup logger.go:42: 06:37:46 | demand-backup/26-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:37:46 | demand-backup/26-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ MYSQL_VERSION=8.0 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export MINIO_VER=5.4.0 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ MINIO_VER=5.4.0 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ export VAULT_VER=0.16.1 logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ VAULT_VER=0.16.1 logger.go:42: 06:37:46 | demand-backup/26-read-data | ++++ which gdate logger.go:42: 06:37:46 | demand-backup/26-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:37:46 | demand-backup/26-read-data | ++++ which date logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ date=/usr/sbin/date logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ oc get projects logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ : logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ kubectl get nodes logger.go:42: 06:37:46 | demand-backup/26-read-data | +++ grep '^minikube' logger.go:42: 06:37:47 | demand-backup/26-read-data | ++ oc get projects logger.go:42: 06:37:47 | demand-backup/26-read-data | +++ kubectl version -o json logger.go:42: 06:37:47 | demand-backup/26-read-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:37:47 | demand-backup/26-read-data | +++ grep '\-eks\-' logger.go:42: 06:37:47 | demand-backup/26-read-data | grep: warning: stray \ before - logger.go:42: 06:37:47 | demand-backup/26-read-data | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:37:47 | demand-backup/26-read-data | ++ '[' ']' logger.go:42: 06:37:47 | demand-backup/26-read-data | ++ EKS=0 logger.go:42: 06:37:47 | demand-backup/26-read-data | ++ get_user_pass root logger.go:42: 06:37:47 | demand-backup/26-read-data | ++ local user=root logger.go:42: 06:37:47 | demand-backup/26-read-data | ++ base64 --decode logger.go:42: 06:37:47 | demand-backup/26-read-data | ++ kubectl -n kuttl-test-safe-ladybug get secret test-secrets -o 'jsonpath={.data.root}' logger.go:42: 06:37:48 | demand-backup/26-read-data | + password='w(nT7<_+scfBV.#>Ox5' logger.go:42: 06:37:48 | demand-backup/26-read-data | ++ get_cluster_name logger.go:42: 06:37:48 | demand-backup/26-read-data | ++ kubectl -n kuttl-test-safe-ladybug get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 06:37:48 | demand-backup/26-read-data | + cluster_name=demand-backup logger.go:42: 06:37:48 | demand-backup/26-read-data | + for i in 0 1 2 logger.go:42: 06:37:48 | demand-backup/26-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:37:48 | demand-backup/26-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:37:48 | demand-backup/26-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:37:48 | demand-backup/26-read-data | ++ local pod= logger.go:42: 06:37:48 | demand-backup/26-read-data | +++ get_client_pod logger.go:42: 06:37:48 | demand-backup/26-read-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:37:49 | demand-backup/26-read-data | ++ client_pod=mysql-client logger.go:42: 06:37:49 | demand-backup/26-read-data | ++ wait_pod mysql-client logger.go:42: 06:37:49 | demand-backup/26-read-data | ++ local pod=mysql-client logger.go:42: 06:37:49 | demand-backup/26-read-data | ++ set +o xtrace logger.go:42: 06:37:49 | demand-backup/26-read-data | mysql-clienttrue logger.go:42: 06:37:49 | demand-backup/26-read-data | ++ sed -e 's/mysql: //' logger.go:42: 06:37:49 | demand-backup/26-read-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:37:49 | demand-backup/26-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:37:50 | demand-backup/26-read-data | + data=100500 logger.go:42: 06:37:50 | demand-backup/26-read-data | + kubectl create configmap -n kuttl-test-safe-ladybug 06-read-data-azure-0 --from-literal=data=100500 logger.go:42: 06:37:50 | demand-backup/26-read-data | configmap/06-read-data-azure-0 created logger.go:42: 06:37:50 | demand-backup/26-read-data | + for i in 0 1 2 logger.go:42: 06:37:50 | demand-backup/26-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:37:50 | demand-backup/26-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:37:50 | demand-backup/26-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:37:50 | demand-backup/26-read-data | ++ local pod= logger.go:42: 06:37:50 | demand-backup/26-read-data | +++ get_client_pod logger.go:42: 06:37:50 | demand-backup/26-read-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:37:51 | demand-backup/26-read-data | ++ client_pod=mysql-client logger.go:42: 06:37:51 | demand-backup/26-read-data | ++ wait_pod mysql-client logger.go:42: 06:37:51 | demand-backup/26-read-data | ++ local pod=mysql-client logger.go:42: 06:37:51 | demand-backup/26-read-data | ++ set +o xtrace logger.go:42: 06:37:51 | demand-backup/26-read-data | mysql-clienttrue logger.go:42: 06:37:51 | demand-backup/26-read-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:37:51 | demand-backup/26-read-data | ++ sed -e 's/mysql: //' logger.go:42: 06:37:51 | demand-backup/26-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:37:52 | demand-backup/26-read-data | + data=100500 logger.go:42: 06:37:52 | demand-backup/26-read-data | + kubectl create configmap -n kuttl-test-safe-ladybug 06-read-data-azure-1 --from-literal=data=100500 logger.go:42: 06:37:53 | demand-backup/26-read-data | configmap/06-read-data-azure-1 created logger.go:42: 06:37:53 | demand-backup/26-read-data | + for i in 0 1 2 logger.go:42: 06:37:53 | demand-backup/26-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:37:53 | demand-backup/26-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 06:37:53 | demand-backup/26-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:37:53 | demand-backup/26-read-data | ++ local pod= logger.go:42: 06:37:53 | demand-backup/26-read-data | +++ get_client_pod logger.go:42: 06:37:53 | demand-backup/26-read-data | +++ kubectl -n kuttl-test-safe-ladybug get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 06:37:53 | demand-backup/26-read-data | ++ client_pod=mysql-client logger.go:42: 06:37:53 | demand-backup/26-read-data | ++ wait_pod mysql-client logger.go:42: 06:37:53 | demand-backup/26-read-data | ++ local pod=mysql-client logger.go:42: 06:37:53 | demand-backup/26-read-data | ++ set +o xtrace logger.go:42: 06:37:53 | demand-backup/26-read-data | mysql-clienttrue logger.go:42: 06:37:53 | demand-backup/26-read-data | ++ kubectl -n kuttl-test-safe-ladybug exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -p'\''w(nT7<_+scfBV.#>Ox5'\''' logger.go:42: 06:37:53 | demand-backup/26-read-data | ++ sed -e 's/mysql: //' logger.go:42: 06:37:53 | demand-backup/26-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 06:37:54 | demand-backup/26-read-data | + data=100500 logger.go:42: 06:37:54 | demand-backup/26-read-data | + kubectl create configmap -n kuttl-test-safe-ladybug 06-read-data-azure-2 --from-literal=data=100500 logger.go:42: 06:37:55 | demand-backup/26-read-data | configmap/06-read-data-azure-2 created logger.go:42: 06:37:56 | demand-backup/26-read-data | test step completed 26-read-data logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | starting test step 27-delete-all-backups logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | running command: [sh -c set -o errexit set -o xtrace source ../../functions kubectl delete ps-backup --all -n "${NAMESPACE}" backup_name_minio="demand-backup-minio" accessKey="$(kubectl -n "${NAMESPACE}" get secret minio-secret -o jsonpath='{.data.AWS_ACCESS_KEY_ID}' | base64 -d)" secretKey="$(kubectl -n "${NAMESPACE}" get secret minio-secret -o jsonpath='{.data.AWS_SECRET_ACCESS_KEY}' | base64 -d)" backup_exists=$( kubectl run -n "${NAMESPACE}" -i --rm aws-cli --image=perconalab/awscli --restart=Never -- \ /usr/bin/env AWS_ACCESS_KEY_ID="${accessKey}" AWS_SECRET_ACCESS_KEY="${secretKey}" AWS_DEFAULT_REGION=us-east-1 \ /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls 'operator-testing/' | grep -c "${backup_name_minio}/" | cat exit "${PIPESTATUS[0]}" ) if [[ 1 -eq $backup_exists ]]; then echo "Backup was not removed from bucket -- minio" exit 1 fi] logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | + source ../../functions logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ realpath ../../.. logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | ++++ pwd logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | ++ test_name=demand-backup logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ MYSQL_VERSION=8.0 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export MINIO_VER=5.4.0 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ MINIO_VER=5.4.0 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ export VAULT_VER=0.16.1 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ VAULT_VER=0.16.1 logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | ++++ which gdate logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | ++++ which date logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ date=/usr/sbin/date logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ oc get projects logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ : logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ kubectl get nodes logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ grep '^minikube' logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | ++ oc get projects logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ kubectl version -o json logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | +++ grep '\-eks\-' logger.go:42: 06:37:56 | demand-backup/27-delete-all-backups | grep: warning: stray \ before - logger.go:42: 06:37:57 | demand-backup/27-delete-all-backups | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:37:57 | demand-backup/27-delete-all-backups | ++ '[' ']' logger.go:42: 06:37:57 | demand-backup/27-delete-all-backups | ++ EKS=0 logger.go:42: 06:37:57 | demand-backup/27-delete-all-backups | + kubectl delete ps-backup --all -n kuttl-test-safe-ladybug logger.go:42: 06:37:57 | demand-backup/27-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-azure" deleted from kuttl-test-safe-ladybug namespace logger.go:42: 06:37:58 | demand-backup/27-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-gcp" deleted from kuttl-test-safe-ladybug namespace logger.go:42: 06:37:58 | demand-backup/27-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-minio" deleted from kuttl-test-safe-ladybug namespace logger.go:42: 06:37:58 | demand-backup/27-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-s3" deleted from kuttl-test-safe-ladybug namespace logger.go:42: 06:38:01 | demand-backup/27-delete-all-backups | + backup_name_minio=demand-backup-minio logger.go:42: 06:38:01 | demand-backup/27-delete-all-backups | ++ kubectl -n kuttl-test-safe-ladybug get secret minio-secret -o 'jsonpath={.data.AWS_ACCESS_KEY_ID}' logger.go:42: 06:38:01 | demand-backup/27-delete-all-backups | ++ base64 -d logger.go:42: 06:38:02 | demand-backup/27-delete-all-backups | + accessKey=some-access-key logger.go:42: 06:38:02 | demand-backup/27-delete-all-backups | ++ kubectl -n kuttl-test-safe-ladybug get secret minio-secret -o 'jsonpath={.data.AWS_SECRET_ACCESS_KEY}' logger.go:42: 06:38:02 | demand-backup/27-delete-all-backups | ++ base64 -d logger.go:42: 06:38:02 | demand-backup/27-delete-all-backups | + secretKey=some-secret-key logger.go:42: 06:38:02 | demand-backup/27-delete-all-backups | ++ cat logger.go:42: 06:38:02 | demand-backup/27-delete-all-backups | ++ grep -c demand-backup-minio/ logger.go:42: 06:38:02 | demand-backup/27-delete-all-backups | ++ kubectl run -n kuttl-test-safe-ladybug -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls operator-testing/ logger.go:42: 06:38:03 | demand-backup/27-delete-all-backups | All commands and output from this session will be recorded in container logs, including credentials and sensitive information passed through the command prompt. logger.go:42: 06:38:03 | demand-backup/27-delete-all-backups | If you don't see a command prompt, try pressing enter. logger.go:42: 06:38:04 | demand-backup/27-delete-all-backups | warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: unable to upgrade connection: container aws-cli not found in pod aws-cli_kuttl-test-safe-ladybug logger.go:42: 06:38:06 | demand-backup/27-delete-all-backups | ++ exit 0 logger.go:42: 06:38:06 | demand-backup/27-delete-all-backups | + backup_exists=0 logger.go:42: 06:38:06 | demand-backup/27-delete-all-backups | + [[ 1 -eq 0 ]] logger.go:42: 06:38:06 | demand-backup/27-delete-all-backups | test step completed 27-delete-all-backups logger.go:42: 06:38:06 | demand-backup/98-drop-finalizer | starting test step 98-drop-finalizer logger.go:42: 06:38:06 | demand-backup/98-drop-finalizer | PerconaServerMySQL:kuttl-test-safe-ladybug/demand-backup updated logger.go:42: 06:38:06 | demand-backup/98-drop-finalizer | test step completed 98-drop-finalizer logger.go:42: 06:38:06 | demand-backup/99-remove-cluster-gracefully | starting test step 99-remove-cluster-gracefully logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | running command: [sh -c set -o errexit set -o xtrace source ../../functions destroy_operator] logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | + source ../../functions logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ realpath ../../.. logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | ++++ pwd logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/tests/demand-backup logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | ++ test_name=demand-backup logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/vars.sh logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/deploy logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/e2e-tests/conf logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export GIT_BRANCH=PR-1041 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ GIT_BRANCH=PR-1041 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export VERSION=PR-1041-fa9862d8 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ VERSION=PR-1041-fa9862d8 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1041-fa9862d8 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export MYSQL_VERSION=8.0 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ MYSQL_VERSION=8.0 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export CERT_MANAGER_VER=1.18.2 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ CERT_MANAGER_VER=1.18.2 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export MINIO_VER=5.4.0 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ MINIO_VER=5.4.0 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ export VAULT_VER=0.16.1 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ VAULT_VER=0.16.1 logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | ++++ which gdate logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1041/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | ++++ which date logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ date=/usr/sbin/date logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ oc get projects logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ : logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ kubectl get nodes logger.go:42: 06:38:07 | demand-backup/99-remove-cluster-gracefully | +++ grep '^minikube' logger.go:42: 06:38:08 | demand-backup/99-remove-cluster-gracefully | ++ oc get projects logger.go:42: 06:38:08 | demand-backup/99-remove-cluster-gracefully | +++ kubectl version -o json logger.go:42: 06:38:08 | demand-backup/99-remove-cluster-gracefully | +++ jq -r .serverVersion.gitVersion logger.go:42: 06:38:08 | demand-backup/99-remove-cluster-gracefully | +++ grep '\-eks\-' logger.go:42: 06:38:08 | demand-backup/99-remove-cluster-gracefully | grep: warning: stray \ before - logger.go:42: 06:38:08 | demand-backup/99-remove-cluster-gracefully | Warning: version difference between client (1.34) and server (1.30) exceeds the supported minor version skew of +/-1 logger.go:42: 06:38:08 | demand-backup/99-remove-cluster-gracefully | ++ '[' ']' logger.go:42: 06:38:08 | demand-backup/99-remove-cluster-gracefully | ++ EKS=0 logger.go:42: 06:38:08 | demand-backup/99-remove-cluster-gracefully | + destroy_operator logger.go:42: 06:38:08 | demand-backup/99-remove-cluster-gracefully | + kubectl -n ps-operator delete deployment percona-server-mysql-operator --force --grace-period=0 logger.go:42: 06:38:08 | demand-backup/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 06:38:09 | demand-backup/99-remove-cluster-gracefully | deployment.apps "percona-server-mysql-operator" force deleted from ps-operator namespace logger.go:42: 06:38:09 | demand-backup/99-remove-cluster-gracefully | + [[ -n ps-operator ]] logger.go:42: 06:38:09 | demand-backup/99-remove-cluster-gracefully | + kubectl delete namespace ps-operator --force --grace-period=0 logger.go:42: 06:38:09 | demand-backup/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 06:38:09 | demand-backup/99-remove-cluster-gracefully | namespace "ps-operator" force deleted logger.go:42: 06:38:17 | demand-backup/99-remove-cluster-gracefully | test step completed 99-remove-cluster-gracefully logger.go:42: 06:38:17 | demand-backup | demand-backup events from ns kuttl-test-safe-ladybug: logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:22 +0000 UTC Normal Pod mysql-client Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/mysql-client to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:22 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:27 +0000 UTC Normal PersistentVolumeClaim minio-service WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:30 +0000 UTC Normal ReplicaSet.apps minio-service-86dfccd949 SuccessfulCreate Created pod: minio-service-86dfccd949-fj6xm replicaset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:30 +0000 UTC Normal Deployment.apps minio-service ScalingReplicaSet Scaled up replica set minio-service-86dfccd949 to 1 deployment-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:30 +0000 UTC Normal PersistentVolumeClaim minio-service ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:30 +0000 UTC Normal PersistentVolumeClaim minio-service Provisioning External provisioner is provisioning volume for claim "kuttl-test-safe-ladybug/minio-service" pd.csi.storage.gke.io_gke-5cefd2653acd40f3b7b1-8a00-3e0b-vm_46bc7d6b-bced-49e4-808d-645fd03a327b logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:31 +0000 UTC Normal Job.batch minio-service-post-job SuccessfulCreate Created pod: minio-service-post-job-7v4sd job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:32 +0000 UTC Normal Pod minio-service-post-job-7v4sd Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/minio-service-post-job-7v4sd to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:32 +0000 UTC Normal Pod minio-service-post-job-7v4sd.spec.containers{minio-make-user} Pulling Pulling image "quay.io/minio/mc:RELEASE.2024-11-21T17-21-54Z" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:33 +0000 UTC Normal Pod minio-service-post-job-7v4sd.spec.containers{minio-make-user} Pulled Successfully pulled image "quay.io/minio/mc:RELEASE.2024-11-21T17-21-54Z" in 1.494s (1.494s including waiting). Image size: 28122288 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:33 +0000 UTC Normal Pod minio-service-post-job-7v4sd.spec.containers{minio-make-user} Created Created container: minio-make-user kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:33 +0000 UTC Normal Pod minio-service-post-job-7v4sd.spec.containers{minio-make-user} Started Started container minio-make-user kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:34 +0000 UTC Normal Pod minio-service-86dfccd949-fj6xm Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/minio-service-86dfccd949-fj6xm to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:34 +0000 UTC Normal PersistentVolumeClaim minio-service ProvisioningSucceeded Successfully provisioned volume pvc-b8f827d4-8836-4ea9-9cc1-4e07add0200e pd.csi.storage.gke.io_gke-5cefd2653acd40f3b7b1-8a00-3e0b-vm_46bc7d6b-bced-49e4-808d-645fd03a327b logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:39 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 17.115s (17.115s including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:39 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Created Created container: mysql-client kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:40 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Started Started container mysql-client kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:42 +0000 UTC Normal Pod minio-service-86dfccd949-fj6xm SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b8f827d4-8836-4ea9-9cc1-4e07add0200e" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:44 +0000 UTC Normal Pod minio-service-86dfccd949-fj6xm.spec.containers{minio} Pulling Pulling image "quay.io/minio/minio:RELEASE.2024-12-18T13-15-44Z" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:46 +0000 UTC Normal Pod minio-service-86dfccd949-fj6xm.spec.containers{minio} Pulled Successfully pulled image "quay.io/minio/minio:RELEASE.2024-12-18T13-15-44Z" in 2.203s (2.203s including waiting). Image size: 62642371 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:46 +0000 UTC Normal Pod minio-service-86dfccd949-fj6xm.spec.containers{minio} Created Created container: minio kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:46 +0000 UTC Normal Pod minio-service-86dfccd949-fj6xm.spec.containers{minio} Started Started container minio kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:51 +0000 UTC Normal Job.batch minio-service-post-job Completed Job completed job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:54 +0000 UTC Normal Pod aws-cli Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/aws-cli to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:54 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulling Pulling image "perconalab/awscli" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:56 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulled Successfully pulled image "perconalab/awscli" in 2.361s (2.361s including waiting). Image size: 30314917 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:56 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Created Created container: aws-cli kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:06:57 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Started Started container aws-cli kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:09 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:09 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:09 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-safe-ladybug/datadir-demand-backup-mysql-0" pd.csi.storage.gke.io_gke-5cefd2653acd40f3b7b1-8a00-3e0b-vm_46bc7d6b-bced-49e4-808d-645fd03a327b logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:09 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Claim datadir-demand-backup-mysql-0 Pod demand-backup-mysql-0 in StatefulSet demand-backup-mysql success statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:09 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Pod demand-backup-mysql-0 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:10 +0000 UTC Normal Pod demand-backup-orc-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:10 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulCreate create Pod demand-backup-orc-0 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:10 +0000 UTC Normal PodDisruptionBudget.policy demand-backup-orchestrator NoPods No matching pods found controllermanager logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:10 +0000 UTC Warning PerconaServerMySQL.ps.percona.com demand-backup ClusterStateChanged -> Initializing ps-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:11 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:11 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 98ms (98ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:11 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:11 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:11 +0000 UTC Warning PerconaServerMySQL.ps.percona.com demand-backup ClusterStateChanged Error -> Initializing ps-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:13 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 ProvisioningSucceeded Successfully provisioned volume pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c pd.csi.storage.gke.io_gke-5cefd2653acd40f3b7b1-8a00-3e0b-vm_46bc7d6b-bced-49e4-808d-645fd03a327b logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:13 +0000 UTC Normal Pod demand-backup-mysql-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:13 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:14 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 1.064s (1.064s including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:14 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:14 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:14 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:14 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 103ms (103ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:14 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:15 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:22 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:23 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 3.239s (3.239s including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:30 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:45 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 14.886s (14.886s including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:45 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:45 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:45 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:46 +0000 UTC Normal Pod demand-backup-orc-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:46 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:46 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulCreate create Pod demand-backup-orc-1 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:49 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 2.66s (2.66s including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:49 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:49 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:53 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:54 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 1.18s (1.18s including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:54 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:54 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:54 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:54 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 72ms (72ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:54 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:07:54 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:03 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 17.856s (17.856s including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:03 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:03 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:03 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:08 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 4.721s (4.721s including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:08 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:08 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:18 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:18 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:18 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 Provisioning External provisioner is provisioning volume for claim "kuttl-test-safe-ladybug/datadir-demand-backup-mysql-1" pd.csi.storage.gke.io_gke-5cefd2653acd40f3b7b1-8a00-3e0b-vm_46bc7d6b-bced-49e4-808d-645fd03a327b logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:18 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Claim datadir-demand-backup-mysql-1 Pod demand-backup-mysql-1 in StatefulSet demand-backup-mysql success statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:18 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Pod demand-backup-mysql-1 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:19 +0000 UTC Normal Pod demand-backup-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:19 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:19 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-0 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 224ms (224ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:22 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 ProvisioningSucceeded Successfully provisioned volume pvc-63901865-de41-42b5-95cc-af65b78dab8a pd.csi.storage.gke.io_gke-5cefd2653acd40f3b7b1-8a00-3e0b-vm_46bc7d6b-bced-49e4-808d-645fd03a327b logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:22 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:22 +0000 UTC Normal Pod demand-backup-mysql-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:24 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 2.182s (2.182s including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:24 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:24 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:24 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:24 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 82ms (82ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:24 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:24 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:26 +0000 UTC Normal Pod demand-backup-orc-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:26 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulCreate create Pod demand-backup-orc-2 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:27 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:27 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 105ms (105ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:27 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:27 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:30 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-63901865-de41-42b5-95cc-af65b78dab8a" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:30 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:32 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 1.287s (1.287s including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:32 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:32 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:32 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 99ms (99ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:34 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 79ms (79ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:34 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:34 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 88ms (88ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:37 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:39 +0000 UTC Warning Pod demand-backup-haproxy-0.spec.containers{haproxy} Unhealthy Readiness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:42 +0000 UTC Warning Pod demand-backup-orc-2.spec.containers{orchestrator} Unhealthy Liveness probe failed: Get "http://10.182.32.7:3000/api/lb-check": dial tcp 10.182.32.7:3000: connect: connection refused kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:52 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 14.549s (14.549s including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:52 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:52 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:52 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 4.167s (4.167s including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:08:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:05 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/09/11 06:09:04 Waiting for MySQL ready state 2025/09/11 06:09:04 MySQL is ready 2025/09/11 06:09:04 Peers: [3231363737303265.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3665303366646565.demand-backup-mysql-unready.kuttl-test-safe-ladybug] 2025/09/11 06:09:04 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:09:04 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug] 2025/09/11 06:09:04 lookup demand-backup-mysql-1 [10.182.33.10] 2025/09/11 06:09:04 PodIP: 10.182.33.10 2025/09/11 06:09:04 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug [10.182.32.6] 2025/09/11 06:09:04 PrimaryIP: 10.182.32.6 2025/09/11 06:09:04 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:09:04 Opening connection to 10.182.33.10 2025/09/11 06:09:04 Clone required: true 2025/09/11 06:09:04 Checking if a clone in progress 2025/09/11 06:09:04 Clone in progress: false 2025/09/11 06:09:04 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:09:05 Clone finished. Restarting container... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:05 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 70ms (70ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:44 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:45 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:45 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 Provisioning External provisioner is provisioning volume for claim "kuttl-test-safe-ladybug/datadir-demand-backup-mysql-2" pd.csi.storage.gke.io_gke-5cefd2653acd40f3b7b1-8a00-3e0b-vm_46bc7d6b-bced-49e4-808d-645fd03a327b logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:45 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Claim datadir-demand-backup-mysql-2 Pod demand-backup-mysql-2 in StatefulSet demand-backup-mysql success statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:45 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Pod demand-backup-mysql-2 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:48 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 ProvisioningSucceeded Successfully provisioned volume pvc-37a4fc22-a539-4059-a607-4ae376d331cc pd.csi.storage.gke.io_gke-5cefd2653acd40f3b7b1-8a00-3e0b-vm_46bc7d6b-bced-49e4-808d-645fd03a327b logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:49 +0000 UTC Normal Pod demand-backup-mysql-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:54 +0000 UTC Normal Pod demand-backup-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:54 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-1 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:55 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:55 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 96ms (96ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:55 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:55 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:56 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:56 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-37a4fc22-a539-4059-a607-4ae376d331cc" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:58 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:58 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 95ms (95ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:58 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:58 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:59 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 2.014s (2.014s including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:59 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:59 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:59 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:59 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 111ms (111ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:59 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:09:59 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:15 +0000 UTC Normal Pod demand-backup-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:15 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-2 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:15 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 14.59s (14.59s including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:15 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:15 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:15 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 4.322s (4.322s including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:33 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:33 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 18.582s (18.582s including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:33 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:33 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:33 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:37 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 3.554s (3.554s including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:37 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:37 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:37 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:37 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 80ms (80ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:37 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:37 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:37 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 3.243s (3.243s including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:37 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:37 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:42 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/09/11 06:10:38 Waiting for bootstrap.lock to be deleted 2025/09/11 06:10:42 Waiting for MySQL ready state 2025/09/11 06:10:42 MySQL is ready 2025/09/11 06:10:42 Peers: [3230353162373262.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3231363737303265.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3665303366646565.demand-backup-mysql-unready.kuttl-test-safe-ladybug] 2025/09/11 06:10:42 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:10:42 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug demand-backup-mysql-2.demand-backup-mysql.kuttl-test-safe-ladybug] 2025/09/11 06:10:42 lookup demand-backup-mysql-2 [10.182.34.14] 2025/09/11 06:10:42 PodIP: 10.182.34.14 2025/09/11 06:10:42 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug [10.182.32.6] 2025/09/11 06:10:42 PrimaryIP: 10.182.32.6 2025/09/11 06:10:42 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:10:42 Opening connection to 10.182.34.14 2025/09/11 06:10:42 Clone required: true 2025/09/11 06:10:42 Checking if a clone in progress 2025/09/11 06:10:42 Clone in progress: false 2025/09/11 06:10:42 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:10:42 Clone finished. Restarting container... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:42 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:10:52 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 199ms (199ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:11:28 +0000 UTC Warning PerconaServerMySQL.ps.percona.com demand-backup ClusterStateChanged Initializing -> Ready ps-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:11:38 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:11:38 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:11:38 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:11:41 +0000 UTC Warning PerconaServerMySQL.ps.percona.com demand-backup ClusterStateChanged Ready -> Initializing ps-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:11:43 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/09/11 06:11:43 MySQL state is not ready... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:11:48 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/09/11 06:11:48 MySQL state is not ready... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:11:53 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:11:58 +0000 UTC Normal Pod demand-backup-mysql-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:11:59 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:11:59 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 115ms (115ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:11:59 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:11:59 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:00 +0000 UTC Warning Pod demand-backup-haproxy-1.spec.containers{haproxy} Unhealthy Readiness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:02 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:02 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 183ms (183ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:02 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:02 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:02 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:02 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 165ms (165ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:02 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:02 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:02 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:02 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 188ms (188ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:02 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:02 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:20 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Startup probe failed: 2025/09/11 06:12:19 Waiting for MySQL ready state 2025/09/11 06:12:19 MySQL is ready 2025/09/11 06:12:19 Peers: [3230353162373262.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3231363737303265.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3433396165613931.demand-backup-mysql-unready.kuttl-test-safe-ladybug] 2025/09/11 06:12:19 FQDN: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:12:19 Primary: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug Replicas: [demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug demand-backup-mysql-2.demand-backup-mysql.kuttl-test-safe-ladybug] 2025/09/11 06:12:19 lookup demand-backup-mysql-0 [10.182.32.9] 2025/09/11 06:12:19 PodIP: 10.182.32.9 2025/09/11 06:12:19 lookup demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug [10.182.33.10] 2025/09/11 06:12:19 PrimaryIP: 10.182.33.10 2025/09/11 06:12:20 Donor: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:12:20 Opening connection to 10.182.32.9 2025/09/11 06:12:20 Clone required: true 2025/09/11 06:12:20 Checking if a clone in progress 2025/09/11 06:12:20 Clone in progress: false 2025/09/11 06:12:20 Cloning from demand-backup-mysql-2.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:12:20 Clone finished. Restarting container... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:20 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:12:24 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 109ms (109ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:13:01 +0000 UTC Warning PerconaServerMySQL.ps.percona.com demand-backup AsyncReplicationNotReady demand-backup-mysql-2: [not_replicating] ps-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:13:58 +0000 UTC Normal Job.batch xb-demand-backup-minio-minio SuccessfulCreate Created pod: xb-demand-backup-minio-minio-sksjv job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:13:59 +0000 UTC Normal Pod xb-demand-backup-minio-minio-sksjv Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/xb-demand-backup-minio-minio-sksjv to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:13:59 +0000 UTC Normal Pod xb-demand-backup-minio-minio-sksjv.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:13:59 +0000 UTC Normal Pod xb-demand-backup-minio-minio-sksjv.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 110ms (110ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:13:59 +0000 UTC Normal Pod xb-demand-backup-minio-minio-sksjv.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:13:59 +0000 UTC Normal Pod xb-demand-backup-minio-minio-sksjv.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:00 +0000 UTC Normal Pod xb-demand-backup-minio-minio-sksjv.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:01 +0000 UTC Normal Pod xb-demand-backup-minio-minio-sksjv.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 164ms (164ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:01 +0000 UTC Normal Pod xb-demand-backup-minio-minio-sksjv.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:01 +0000 UTC Normal Pod xb-demand-backup-minio-minio-sksjv.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:07 +0000 UTC Normal Job.batch xb-demand-backup-minio-minio Completed Job completed job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:27 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulDelete delete Pod demand-backup-haproxy-2 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:27 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:27 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:27 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:27 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulDelete delete Pod demand-backup-mysql-2 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:27 +0000 UTC Warning PerconaServerMySQL.ps.percona.com demand-backup ClusterStateChanged Ready -> Stopping ps-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:28 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:28 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:28 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulDelete delete Pod demand-backup-haproxy-1 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:28 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:28 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:28 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulDelete delete Pod demand-backup-orc-2 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:30 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:30 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:30 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulDelete delete Pod demand-backup-orc-1 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:31 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:31 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulDelete delete Pod demand-backup-haproxy-0 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:31 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:31 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:31 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulDelete delete Pod demand-backup-orc-0 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:32 +0000 UTC Normal PodDisruptionBudget.policy demand-backup-haproxy NoPods No matching pods found controllermanager logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:32 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulDelete delete Pod demand-backup-mysql-1 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:34 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/09/11 06:14:34 MySQL state is not ready... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:53 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulDelete delete Pod demand-backup-mysql-0 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:56 +0000 UTC Normal PodDisruptionBudget.policy demand-backup-mysql NoPods No matching pods found controllermanager logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:56 +0000 UTC Warning PerconaServerMySQL.ps.percona.com demand-backup ClusterStateChanged Stopping -> Paused ps-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:57 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-qtldc Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/xb-restore-demand-backup-restore-minio-qtldc to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:57 +0000 UTC Warning Pod xb-restore-demand-backup-restore-minio-qtldc FailedAttachVolume Multi-Attach error for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:14:57 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio SuccessfulCreate Created pod: xb-restore-demand-backup-restore-minio-qtldc job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:16 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-qtldc SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:17 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-qtldc.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:17 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-qtldc.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 116ms (116ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:17 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-qtldc.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:18 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-qtldc.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:20 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-qtldc.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:20 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-qtldc.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 86ms (86ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:20 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-qtldc.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:20 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-qtldc.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:28 +0000 UTC Normal Pod demand-backup-mysql-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:28 +0000 UTC Warning Pod demand-backup-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:28 +0000 UTC Warning PerconaServerMySQL.ps.percona.com demand-backup ClusterStateChanged Paused -> Initializing ps-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:28 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio Completed Job completed job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:29 +0000 UTC Normal Pod demand-backup-orc-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:29 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:29 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 103ms (103ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:29 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:30 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:32 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:32 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 80ms (80ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:32 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:32 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:32 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:32 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 80ms (80ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:32 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:32 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:45 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:46 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:46 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 122ms (122ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:46 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:46 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 112ms (112ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 105ms (105ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:49 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 79ms (79ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:49 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:15:49 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:04 +0000 UTC Normal Pod demand-backup-orc-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:05 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:05 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 117ms (117ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:05 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:05 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:07 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:07 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 84ms (84ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:07 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:07 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:07 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:07 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 78ms (78ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:07 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:07 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:21 +0000 UTC Normal Pod demand-backup-mysql-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:24 +0000 UTC Normal Pod demand-backup-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:24 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:25 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 160ms (160ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:25 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:25 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:27 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:27 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 191ms (191ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:27 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:27 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:27 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:27 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 89ms (89ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:27 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:27 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:29 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-63901865-de41-42b5-95cc-af65b78dab8a" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 234ms (234ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 136ms (136ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 201ms (201ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 189ms (189ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:34 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:35 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:40 +0000 UTC Normal Pod demand-backup-orc-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:41 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:41 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 213ms (213ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:41 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:41 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 219ms (219ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:43 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:43 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 180ms (180ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:43 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:43 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:44 +0000 UTC Warning Pod demand-backup-haproxy-0.spec.containers{haproxy} Unhealthy Readiness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:53 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/09/11 06:16:52 Waiting for MySQL ready state 2025/09/11 06:16:52 MySQL is ready 2025/09/11 06:16:52 Peers: [3137386239616562.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3939313834363335.demand-backup-mysql-unready.kuttl-test-safe-ladybug] 2025/09/11 06:16:52 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:16:52 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug] 2025/09/11 06:16:52 lookup demand-backup-mysql-1 [10.182.33.15] 2025/09/11 06:16:52 PodIP: 10.182.33.15 2025/09/11 06:16:52 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug [10.182.32.10] 2025/09/11 06:16:52 PrimaryIP: 10.182.32.10 2025/09/11 06:16:52 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:16:52 Opening connection to 10.182.33.15 2025/09/11 06:16:52 Clone required: true 2025/09/11 06:16:52 Checking if a clone in progress 2025/09/11 06:16:52 Clone in progress: false 2025/09/11 06:16:52 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:16:53 Clone finished. Restarting container... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:16:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 90ms (90ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:27 +0000 UTC Normal Pod demand-backup-mysql-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:30 +0000 UTC Normal Pod demand-backup-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:30 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:31 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 126ms (126ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:31 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:31 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:32 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:32 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 80ms (80ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:32 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:32 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:32 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:33 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 98ms (98ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:33 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:33 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:34 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-37a4fc22-a539-4059-a607-4ae376d331cc" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:36 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:36 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 112ms (112ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:36 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:36 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 86ms (86ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 107ms (107ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 99ms (99ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:51 +0000 UTC Normal Pod demand-backup-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:51 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:51 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 127ms (127ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:51 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:51 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:53 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:53 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 81ms (81ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:53 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:53 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:53 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:53 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 112ms (112ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:53 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:53 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:56 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/09/11 06:17:56 Waiting for MySQL ready state 2025/09/11 06:17:56 MySQL is ready 2025/09/11 06:17:56 Peers: [3137386239616562.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3365353638623230.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3939313834363335.demand-backup-mysql-unready.kuttl-test-safe-ladybug] 2025/09/11 06:17:56 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:17:56 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug demand-backup-mysql-2.demand-backup-mysql.kuttl-test-safe-ladybug] 2025/09/11 06:17:56 lookup demand-backup-mysql-2 [10.182.34.17] 2025/09/11 06:17:56 PodIP: 10.182.34.17 2025/09/11 06:17:56 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug [10.182.32.10] 2025/09/11 06:17:56 PrimaryIP: 10.182.32.10 2025/09/11 06:17:56 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:17:56 Opening connection to 10.182.34.17 2025/09/11 06:17:56 Clone required: true 2025/09/11 06:17:56 Checking if a clone in progress 2025/09/11 06:17:56 Clone in progress: false 2025/09/11 06:17:56 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:17:56 Clone finished. Restarting container... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:17:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:18:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 87ms (87ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:04 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:04 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:05 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:05 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:05 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:05 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:05 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:05 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:05 +0000 UTC Warning Pod demand-backup-orc-2.spec.containers{orchestrator} Unhealthy Readiness probe failed: Get "http://10.182.32.11:3000/api/health": dial tcp 10.182.32.11:3000: connect: connection refused kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:06 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:06 +0000 UTC Warning Pod demand-backup-haproxy-1.spec.containers{haproxy} Unhealthy Readiness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:07 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:07 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:08 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:12 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:12 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:12 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:16 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/09/11 06:19:16 MySQL state is not ready... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:21 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:25 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:25 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:25 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:25 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:25 +0000 UTC Warning Pod demand-backup-orc-1.spec.containers{orchestrator} Unhealthy Readiness probe failed: Get "http://10.182.34.16:3000/api/health": dial tcp 10.182.34.16:3000: connect: connection refused kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:35 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-xb5zt Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/xb-restore-demand-backup-restore-minio-backup-source-xb5zt to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:35 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio-backup-source SuccessfulCreate Created pod: xb-restore-demand-backup-restore-minio-backup-source-xb5zt job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:49 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-xb5zt SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:51 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-xb5zt.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:51 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-xb5zt.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 105ms (105ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:51 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-xb5zt.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:51 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-xb5zt.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:52 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-xb5zt.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:52 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-xb5zt.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 80ms (80ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:52 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-xb5zt.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:52 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-xb5zt.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:19:59 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio-backup-source Completed Job completed job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:00 +0000 UTC Normal Pod demand-backup-mysql-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:00 +0000 UTC Warning Pod demand-backup-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:00 +0000 UTC Normal Pod demand-backup-orc-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:01 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:01 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 102ms (102ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:01 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:01 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 78ms (78ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 82ms (82ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:22 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:24 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:24 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 120ms (121ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:24 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:24 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 83ms (83ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 129ms (129ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 76ms (76ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:26 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:36 +0000 UTC Normal Pod demand-backup-orc-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:37 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:37 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 114ms (114ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:37 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:37 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:38 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:40 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 91ms (91ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:40 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:40 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:40 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:40 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 106ms (106ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:40 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:40 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:20:59 +0000 UTC Normal Pod demand-backup-mysql-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:02 +0000 UTC Normal Pod demand-backup-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:02 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:02 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 115ms (115ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:02 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:02 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:04 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:04 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 212ms (212ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:04 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:05 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:05 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:05 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 248ms (248ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:05 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:05 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:10 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-63901865-de41-42b5-95cc-af65b78dab8a" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:11 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 222ms (222ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:12 +0000 UTC Normal Pod demand-backup-orc-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:12 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:12 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 109ms (110ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:12 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:12 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 84ms (84ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 86ms (86ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 96ms (96ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:14 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:14 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 96ms (96ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:14 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:14 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:14 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:14 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 83ms (83ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:14 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:14 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:22 +0000 UTC Warning Pod demand-backup-haproxy-0.spec.containers{haproxy} Unhealthy Readiness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:32 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/09/11 06:21:31 Waiting for MySQL ready state 2025/09/11 06:21:31 MySQL is ready 2025/09/11 06:21:31 Peers: [3332306261653566.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3464306664666565.demand-backup-mysql-unready.kuttl-test-safe-ladybug] 2025/09/11 06:21:31 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:21:31 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug] 2025/09/11 06:21:31 lookup demand-backup-mysql-1 [10.182.33.19] 2025/09/11 06:21:31 PodIP: 10.182.33.19 2025/09/11 06:21:31 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug [10.182.32.13] 2025/09/11 06:21:31 PrimaryIP: 10.182.32.13 2025/09/11 06:21:31 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:21:31 Opening connection to 10.182.33.19 2025/09/11 06:21:31 Clone required: true 2025/09/11 06:21:31 Checking if a clone in progress 2025/09/11 06:21:31 Clone in progress: false 2025/09/11 06:21:31 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:21:32 Clone finished. Restarting container... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:21:36 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 113ms (113ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:07 +0000 UTC Normal Pod demand-backup-mysql-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:08 +0000 UTC Warning Pod demand-backup-mysql-2 FailedMount MountVolume.SetUp failed for volume "vault-keyring-secret" : failed to sync secret cache: timed out waiting for the condition kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:08 +0000 UTC Warning Pod demand-backup-mysql-2 FailedMount MountVolume.SetUp failed for volume "config" : failed to sync configmap cache: timed out waiting for the condition kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:12 +0000 UTC Normal Pod demand-backup-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:13 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:13 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 133ms (133ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:13 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:13 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:15 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:15 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 72ms (72ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:15 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:15 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:15 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:15 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 103ms (103ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:15 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:15 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:15 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-37a4fc22-a539-4059-a607-4ae376d331cc" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:18 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:18 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 164ms (164ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:18 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:18 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:19 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 108ms (108ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 81ms (81ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 197ms (197ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:33 +0000 UTC Normal Pod demand-backup-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:34 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:34 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 212ms (212ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:34 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:34 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:35 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:36 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 117ms (117ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:36 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:36 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:36 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:36 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 75ms (75ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:36 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:36 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:38 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/09/11 06:22:38 Waiting for MySQL ready state 2025/09/11 06:22:38 MySQL is ready 2025/09/11 06:22:38 Peers: [3136663139346336.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3332306261653566.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3464306664666565.demand-backup-mysql-unready.kuttl-test-safe-ladybug] 2025/09/11 06:22:38 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:22:38 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug demand-backup-mysql-2.demand-backup-mysql.kuttl-test-safe-ladybug] 2025/09/11 06:22:38 lookup demand-backup-mysql-2 [10.182.34.20] 2025/09/11 06:22:38 PodIP: 10.182.34.20 2025/09/11 06:22:38 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug [10.182.32.13] 2025/09/11 06:22:38 PrimaryIP: 10.182.32.13 2025/09/11 06:22:38 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:22:38 Opening connection to 10.182.34.20 2025/09/11 06:22:38 Clone required: true 2025/09/11 06:22:38 Checking if a clone in progress 2025/09/11 06:22:38 Clone in progress: false 2025/09/11 06:22:38 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:22:38 Clone finished. Restarting container... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:22:42 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 82ms (82ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:30 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-6gbk7 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/xb-demand-backup-s3-aws-s3-6gbk7 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:30 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-6gbk7.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:30 +0000 UTC Normal Job.batch xb-demand-backup-s3-aws-s3 SuccessfulCreate Created pod: xb-demand-backup-s3-aws-s3-6gbk7 job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:31 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-6gbk7.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 134ms (134ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:31 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-6gbk7.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:31 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-6gbk7.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:33 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-6gbk7.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:33 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-6gbk7.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 138ms (138ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:33 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-6gbk7.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:33 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-6gbk7.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:39 +0000 UTC Normal Job.batch xb-demand-backup-s3-aws-s3 Completed Job completed job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:56 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:56 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:57 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:57 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:57 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:57 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:57 +0000 UTC Warning Pod demand-backup-orc-2.spec.containers{orchestrator} Unhealthy Readiness probe failed: Get "http://10.182.32.14:3000/api/health": dial tcp 10.182.32.14:3000: connect: connection refused kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:58 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:23:58 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:00 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:00 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:00 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:05 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:05 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:06 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:06 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:08 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:26 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-b6qqr Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/xb-restore-demand-backup-restore-s3-b6qqr to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:26 +0000 UTC Warning Pod xb-restore-demand-backup-restore-s3-b6qqr FailedAttachVolume Multi-Attach error for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:26 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-s3 SuccessfulCreate Created pod: xb-restore-demand-backup-restore-s3-b6qqr job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:45 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-b6qqr SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:47 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-b6qqr.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:47 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-b6qqr.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 239ms (239ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:47 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-b6qqr.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:47 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-b6qqr.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:49 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-b6qqr.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:49 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-b6qqr.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 211ms (211ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:49 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-b6qqr.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:49 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-b6qqr.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:57 +0000 UTC Normal Pod demand-backup-mysql-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:57 +0000 UTC Warning Pod demand-backup-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:57 +0000 UTC Normal Pod demand-backup-orc-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:57 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-s3 Completed Job completed job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:58 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:58 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 106ms (106ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:58 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:24:58 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:00 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:00 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 90ms (90ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:00 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:00 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:00 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:00 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 83ms (83ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:00 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:00 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:17 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:18 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:18 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 116ms (116ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:18 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:18 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:20 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:20 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 78ms (78ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:20 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:20 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:20 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:20 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 139ms (139ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:20 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:20 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:20 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:20 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 83ms (83ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:21 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:21 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:33 +0000 UTC Normal Pod demand-backup-orc-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:33 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:34 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 117ms (117ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:34 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:34 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:35 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:35 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 100ms (100ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:35 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:35 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:35 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:35 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 105ms (105ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:35 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:35 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:53 +0000 UTC Normal Pod demand-backup-mysql-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:54 +0000 UTC Normal Pod demand-backup-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:54 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:54 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 102ms (102ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:54 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:54 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:56 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:56 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 92ms (92ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:56 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:56 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:56 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:56 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 91ms (91ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:56 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:25:56 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:01 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-63901865-de41-42b5-95cc-af65b78dab8a" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:04 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:04 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 122ms (122ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:04 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:04 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 108ms (108ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 106ms (106ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 89ms (89ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:09 +0000 UTC Normal Pod demand-backup-orc-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:09 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:09 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 148ms (148ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:09 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:09 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 81ms (81ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 122ms (122ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:14 +0000 UTC Warning Pod demand-backup-haproxy-0.spec.containers{haproxy} Unhealthy Readiness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:25 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/09/11 06:26:24 Waiting for MySQL ready state 2025/09/11 06:26:24 MySQL is ready 2025/09/11 06:26:24 Peers: [3265613435346233.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3433333430616162.demand-backup-mysql-unready.kuttl-test-safe-ladybug] 2025/09/11 06:26:24 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:26:24 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug] 2025/09/11 06:26:24 lookup demand-backup-mysql-1 [10.182.33.24] 2025/09/11 06:26:24 PodIP: 10.182.33.24 2025/09/11 06:26:24 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug [10.182.32.16] 2025/09/11 06:26:24 PrimaryIP: 10.182.32.16 2025/09/11 06:26:24 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:26:24 Opening connection to 10.182.33.24 2025/09/11 06:26:24 Clone required: true 2025/09/11 06:26:24 Checking if a clone in progress 2025/09/11 06:26:24 Clone in progress: false 2025/09/11 06:26:24 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:26:25 Clone finished. Restarting container... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 95ms (95ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:26:59 +0000 UTC Normal Pod demand-backup-mysql-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:04 +0000 UTC Normal Pod demand-backup-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:05 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:05 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 143ms (143ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:05 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:05 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:07 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:07 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 94ms (94ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:07 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:07 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:07 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:07 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 93ms (93ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:07 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:07 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-37a4fc22-a539-4059-a607-4ae376d331cc" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:08 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:08 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:08 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 107ms (107ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:08 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:08 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:10 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:10 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 104ms (104ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:10 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:10 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:10 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:10 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 91ms (91ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:10 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:10 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:10 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:10 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 91ms (91ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:11 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:11 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:25 +0000 UTC Normal Pod demand-backup-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:25 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:25 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 88ms (88ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:25 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:25 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 94ms (94ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 86ms (86ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:29 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/09/11 06:27:28 Waiting for MySQL ready state 2025/09/11 06:27:28 MySQL is ready 2025/09/11 06:27:28 Peers: [3265613435346233.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3433333430616162.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3733633532346364.demand-backup-mysql-unready.kuttl-test-safe-ladybug] 2025/09/11 06:27:28 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:27:28 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug demand-backup-mysql-2.demand-backup-mysql.kuttl-test-safe-ladybug] 2025/09/11 06:27:28 lookup demand-backup-mysql-2 [10.182.34.23] 2025/09/11 06:27:28 PodIP: 10.182.34.23 2025/09/11 06:27:28 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug [10.182.32.16] 2025/09/11 06:27:28 PrimaryIP: 10.182.32.16 2025/09/11 06:27:28 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:27:28 Opening connection to 10.182.34.23 2025/09/11 06:27:28 Clone required: true 2025/09/11 06:27:28 Checking if a clone in progress 2025/09/11 06:27:28 Clone in progress: false 2025/09/11 06:27:28 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:27:29 Clone finished. Restarting container... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:29 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:27:32 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 117ms (117ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:18 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-pdf6n Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/xb-demand-backup-gcp-gcp-cs-pdf6n to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:18 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-pdf6n.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:18 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-pdf6n.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 139ms (139ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:18 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-pdf6n.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:18 +0000 UTC Normal Job.batch xb-demand-backup-gcp-gcp-cs SuccessfulCreate Created pod: xb-demand-backup-gcp-gcp-cs-pdf6n job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:19 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-pdf6n.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:20 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-pdf6n.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:20 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-pdf6n.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 77ms (77ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:20 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-pdf6n.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:20 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-pdf6n.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:26 +0000 UTC Normal Job.batch xb-demand-backup-gcp-gcp-cs Completed Job completed job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:42 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:42 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:43 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:43 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:43 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:43 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:43 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:43 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:43 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:44 +0000 UTC Warning Pod demand-backup-orc-2.spec.containers{orchestrator} Unhealthy Readiness probe failed: Get "http://10.182.32.17:3000/api/health": dial tcp 10.182.32.17:3000: connect: connection refused kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:45 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:45 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:46 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:46 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:50 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:50 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:50 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:53 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/09/11 06:28:53 MySQL state is not ready... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:58 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:58 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:58 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:58 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:28:58 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:13 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-9557s Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/xb-restore-demand-backup-restore-gcp-9557s to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:13 +0000 UTC Warning Pod xb-restore-demand-backup-restore-gcp-9557s FailedAttachVolume Multi-Attach error for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:13 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-gcp SuccessfulCreate Created pod: xb-restore-demand-backup-restore-gcp-9557s job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:31 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-9557s SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:32 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-9557s.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:32 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-9557s.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 130ms (130ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:32 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-9557s.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:32 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-9557s.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:34 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-9557s.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:34 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-9557s.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 112ms (112ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:34 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-9557s.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:34 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-9557s.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:43 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-gcp Completed Job completed job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:44 +0000 UTC Normal Pod demand-backup-mysql-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:44 +0000 UTC Warning Pod demand-backup-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:45 +0000 UTC Normal Pod demand-backup-orc-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 94ms (94ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:47 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:47 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 107ms (107ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:47 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:47 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:47 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:48 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 97ms (97ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:48 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:29:48 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:03 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 101ms (101ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 83ms (83ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 114ms (114ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 97ms (97ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:07 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:20 +0000 UTC Normal Pod demand-backup-orc-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:21 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:21 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 131ms (131ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:21 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:21 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:22 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 103ms (103ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 97ms (97ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:39 +0000 UTC Normal Pod demand-backup-mysql-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:41 +0000 UTC Normal Pod demand-backup-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:42 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:42 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 137ms (137ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:42 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:42 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:43 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:43 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 119ms (119ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:43 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 96ms (96ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:47 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-63901865-de41-42b5-95cc-af65b78dab8a" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:50 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:50 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 119ms (119ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:50 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:50 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:52 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:52 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 80ms (80ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:52 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 95ms (95ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 96ms (96ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:56 +0000 UTC Normal Pod demand-backup-orc-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 130ms (130ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:59 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:59 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 107ms (107ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:59 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:59 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:59 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:59 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 71ms (71ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:59 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:30:59 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:01 +0000 UTC Warning Pod demand-backup-haproxy-0.spec.containers{haproxy} Unhealthy Readiness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:11 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/09/11 06:31:10 Waiting for MySQL ready state 2025/09/11 06:31:10 MySQL is ready 2025/09/11 06:31:10 Peers: [3734663465303034.demand-backup-mysql-unready.kuttl-test-safe-ladybug 6163343238393665.demand-backup-mysql-unready.kuttl-test-safe-ladybug] 2025/09/11 06:31:10 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:31:10 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug] 2025/09/11 06:31:10 lookup demand-backup-mysql-1 [10.182.33.29] 2025/09/11 06:31:10 PodIP: 10.182.33.29 2025/09/11 06:31:10 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug [10.182.32.19] 2025/09/11 06:31:10 PrimaryIP: 10.182.32.19 2025/09/11 06:31:10 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:31:10 Opening connection to 10.182.33.29 2025/09/11 06:31:10 Clone required: true 2025/09/11 06:31:10 Checking if a clone in progress 2025/09/11 06:31:10 Clone in progress: false 2025/09/11 06:31:10 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:31:11 Clone finished. Restarting container... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:11 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 89ms (89ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:45 +0000 UTC Normal Pod demand-backup-mysql-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:52 +0000 UTC Normal Pod demand-backup-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:52 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:52 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 128ms (128ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:52 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:52 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:53 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-37a4fc22-a539-4059-a607-4ae376d331cc" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:54 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:54 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 81ms (81ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:54 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:54 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:54 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:54 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 93ms (93ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:54 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:54 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:54 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:54 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 112ms (112ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:54 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:54 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 69ms (69ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 97ms (97ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 75ms (75ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:31:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:12 +0000 UTC Normal Pod demand-backup-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:13 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:13 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 103ms (103ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:13 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:13 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 232ms (232ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 88ms (88ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:15 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/09/11 06:32:14 Waiting for MySQL ready state 2025/09/11 06:32:14 MySQL is ready 2025/09/11 06:32:14 Peers: [3734663465303034.demand-backup-mysql-unready.kuttl-test-safe-ladybug 6163343238393665.demand-backup-mysql-unready.kuttl-test-safe-ladybug 6265663461656238.demand-backup-mysql-unready.kuttl-test-safe-ladybug] 2025/09/11 06:32:14 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:32:14 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug demand-backup-mysql-2.demand-backup-mysql.kuttl-test-safe-ladybug] 2025/09/11 06:32:14 lookup demand-backup-mysql-2 [10.182.34.26] 2025/09/11 06:32:14 PodIP: 10.182.34.26 2025/09/11 06:32:14 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug [10.182.32.19] 2025/09/11 06:32:14 PrimaryIP: 10.182.32.19 2025/09/11 06:32:14 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:32:14 Opening connection to 10.182.34.26 2025/09/11 06:32:14 Clone required: true 2025/09/11 06:32:14 Checking if a clone in progress 2025/09/11 06:32:14 Clone in progress: false 2025/09/11 06:32:14 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:32:15 Clone finished. Restarting container... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:15 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:32:18 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 70ms (70ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:03 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-hc87d Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/xb-demand-backup-azure-azure-blob-hc87d to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:03 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-hc87d.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:03 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-hc87d.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 127ms (127ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:03 +0000 UTC Normal Job.batch xb-demand-backup-azure-azure-blob SuccessfulCreate Created pod: xb-demand-backup-azure-azure-blob-hc87d job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:04 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-hc87d.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:04 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-hc87d.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:06 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-hc87d.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:06 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-hc87d.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 87ms (87ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:06 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-hc87d.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:06 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-hc87d.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:12 +0000 UTC Normal Job.batch xb-demand-backup-azure-azure-blob Completed Job completed job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:27 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:27 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:27 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:28 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:28 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:28 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:28 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:30 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:30 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:31 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:31 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:31 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:31 +0000 UTC Warning Pod demand-backup-orc-2.spec.containers{orchestrator} Unhealthy Readiness probe failed: Get "http://10.182.32.20:3000/api/health": dial tcp 10.182.32.20:3000: connect: connection refused kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:36 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:36 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:36 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:39 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/09/11 06:33:39 MySQL state is not ready... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:44 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:50 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:50 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:51 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:51 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:58 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-kwwpv Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/xb-restore-demand-backup-restore-azure-kwwpv to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:58 +0000 UTC Warning Pod xb-restore-demand-backup-restore-azure-kwwpv FailedAttachVolume Multi-Attach error for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:33:58 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-azure SuccessfulCreate Created pod: xb-restore-demand-backup-restore-azure-kwwpv job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:17 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-kwwpv SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:18 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-kwwpv.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:18 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-kwwpv.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 137ms (137ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:18 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-kwwpv.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:18 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-kwwpv.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:20 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-kwwpv.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:20 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-kwwpv.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 93ms (93ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:20 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-kwwpv.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:20 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-kwwpv.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:29 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-azure Completed Job completed job-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:30 +0000 UTC Normal Pod demand-backup-mysql-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:30 +0000 UTC Warning Pod demand-backup-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:30 +0000 UTC Normal Pod demand-backup-orc-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:31 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:31 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 142ms (142ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:31 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:31 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 78ms (78ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 86ms (86ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:49 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-b930944c-1fcc-4196-ab24-d05cbf7e376c" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:51 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:51 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 104ms (104ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:51 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:51 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 83ms (83ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 84ms (84ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 112ms (112ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:34:54 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:06 +0000 UTC Normal Pod demand-backup-orc-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:07 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:07 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 123ms (123ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:07 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:07 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:09 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:09 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 88ms (88ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:09 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:09 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:09 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:09 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 146ms (146ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:09 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:09 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:26 +0000 UTC Normal Pod demand-backup-mysql-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:28 +0000 UTC Normal Pod demand-backup-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-0 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:29 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:29 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 179ms (179ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:29 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:29 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:31 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:31 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 98ms (98ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:31 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:31 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:31 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:31 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 68ms (68ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:31 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:31 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:36 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-63901865-de41-42b5-95cc-af65b78dab8a" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:38 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:38 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 167ms (167ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:38 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:38 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 72ms (72ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 105ms (105ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 91ms (91ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:41 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:42 +0000 UTC Normal Pod demand-backup-orc-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-orc-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 101ms (101ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:44 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:44 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 106ms (106ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:44 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:44 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:45 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:45 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 98ms (98ms including waiting). Image size: 72483643 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:45 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:45 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:49 +0000 UTC Warning Pod demand-backup-haproxy-0.spec.containers{haproxy} Unhealthy Readiness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:58 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/09/11 06:35:57 Waiting for MySQL ready state 2025/09/11 06:35:57 MySQL is ready 2025/09/11 06:35:57 Peers: [3331376166346461.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3431636533643138.demand-backup-mysql-unready.kuttl-test-safe-ladybug] 2025/09/11 06:35:57 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:35:57 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug] 2025/09/11 06:35:57 lookup demand-backup-mysql-1 [10.182.33.34] 2025/09/11 06:35:57 PodIP: 10.182.33.34 2025/09/11 06:35:57 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug [10.182.32.22] 2025/09/11 06:35:57 PrimaryIP: 10.182.32.22 2025/09/11 06:35:57 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:35:57 Opening connection to 10.182.33.34 2025/09/11 06:35:57 Clone required: true 2025/09/11 06:35:57 Checking if a clone in progress 2025/09/11 06:35:57 Clone in progress: false 2025/09/11 06:35:57 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:35:58 Clone finished. Restarting container... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:35:58 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:02 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 114ms (114ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:33 +0000 UTC Normal Pod demand-backup-mysql-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-mysql-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:34 +0000 UTC Normal Pod demand-backup-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-1 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-kxft default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:34 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:34 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 91ms (91ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:34 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:34 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:36 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:36 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 88ms (88ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:36 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:36 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:36 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:36 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 88ms (88ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:36 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:36 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:43 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-37a4fc22-a539-4059-a607-4ae376d331cc" attachdetach-controller logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 131ms (131ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 76ms (76ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 101ms (101ms including waiting). Image size: 425449133 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:46 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:47 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 98ms (98ms including waiting). Image size: 133947261 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:47 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:47 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:54 +0000 UTC Normal Pod demand-backup-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/demand-backup-haproxy-2 to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-mpn6 default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:55 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:55 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1041-fa9862d8" in 158ms (158ms including waiting). Image size: 109942953 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:55 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:55 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 88ms (88ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 92ms (92ms including waiting). Image size: 105415370 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:36:57 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:37:05 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/09/11 06:37:04 Waiting for MySQL ready state 2025/09/11 06:37:04 MySQL is ready 2025/09/11 06:37:04 Peers: [3331376166346461.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3366363236346137.demand-backup-mysql-unready.kuttl-test-safe-ladybug 3431636533643138.demand-backup-mysql-unready.kuttl-test-safe-ladybug] 2025/09/11 06:37:04 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:37:04 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug demand-backup-mysql-2.demand-backup-mysql.kuttl-test-safe-ladybug] 2025/09/11 06:37:04 lookup demand-backup-mysql-2 [10.182.34.29] 2025/09/11 06:37:04 PodIP: 10.182.34.29 2025/09/11 06:37:04 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-safe-ladybug [10.182.32.22] 2025/09/11 06:37:04 PrimaryIP: 10.182.32.22 2025/09/11 06:37:04 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:37:04 Opening connection to 10.182.34.29 2025/09/11 06:37:04 Clone required: true 2025/09/11 06:37:04 Checking if a clone in progress 2025/09/11 06:37:04 Clone in progress: false 2025/09/11 06:37:04 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-safe-ladybug 2025/09/11 06:37:05 Clone finished. Restarting container... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:37:05 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:37:09 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 101ms (101ms including waiting). Image size: 417077206 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:03 +0000 UTC Normal Pod aws-cli Binding Scheduled Successfully assigned kuttl-test-safe-ladybug/aws-cli to gke-jen-ps-1041-fa9862d8-default-pool-3152445c-djlw default-scheduler logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:03 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulling Pulling image "perconalab/awscli" kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:03 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulled Successfully pulled image "perconalab/awscli" in 74ms (74ms including waiting). Image size: 30314917 bytes. kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:03 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Created Created container: aws-cli kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:03 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Started Started container aws-cli kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:08 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:08 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:08 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:08 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:08 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:08 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:08 +0000 UTC Warning PodDisruptionBudget.policy demand-backup-haproxy CalculateExpectedPodCountFailed Failed to calculate the number of expected pods: found no controllers for pod "demand-backup-haproxy-0" controllermanager logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:08 +0000 UTC Warning PodDisruptionBudget.policy demand-backup-haproxy CalculateExpectedPodCountFailed Failed to calculate the number of expected pods: found no controllers for pod "demand-backup-haproxy-2" controllermanager logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:08 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:08 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:08 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:08 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:08 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:08 +0000 UTC Warning PodDisruptionBudget.policy demand-backup-orchestrator CalculateExpectedPodCountFailed Failed to calculate the number of expected pods: found no controllers for pod "demand-backup-orc-2" controllermanager logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:09 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:09 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:09 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:09 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:09 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:09 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:09 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:09 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:09 +0000 UTC Warning PodDisruptionBudget.policy demand-backup-mysql CalculateExpectedPodCountFailed Failed to calculate the number of expected pods: found no controllers for pod "demand-backup-mysql-0" controllermanager logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:11 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/09/11 06:38:10 MySQL state is not ready... kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:11 +0000 UTC Warning Pod demand-backup-orc-1.spec.containers{orchestrator} Unhealthy Readiness probe failed: Get "http://10.182.34.28:3000/api/health": dial tcp 10.182.34.28:3000: connect: connection refused kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:12 +0000 UTC Warning Pod demand-backup-orc-2.spec.containers{orchestrator} Unhealthy Readiness probe failed: Get "http://10.182.32.23:3000/api/health": dial tcp 10.182.32.23:3000: connect: connection refused kubelet logger.go:42: 06:38:17 | demand-backup | 2025-09-11 06:38:15 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/09/11 06:38:15 MySQL state is not ready... kubelet logger.go:42: 06:38:18 | demand-backup | Deleting namespace: kuttl-test-safe-ladybug === NAME kuttl harness.go:403: run tests finished harness.go:510: cleaning up harness.go:567: removing temp folder: "" --- PASS: kuttl (1972.35s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/demand-backup (1971.59s) PASS