=== RUN kuttl
harness.go:460: starting setup
harness.go:258: running tests using configured kubeconfig.
harness.go:281: Successful connection to cluster at: https://34.135.191.104
harness.go:366: running tests
harness.go:77: going to run test suite with timeout of 180 seconds for each step
harness.go:378: testsuite: e2e-tests/tests has 46 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/demand-backup-cloud
=== PAUSE kuttl/harness/demand-backup-cloud
=== CONT kuttl/harness/demand-backup-cloud
logger.go:42: 11:23:47 | demand-backup-cloud | Creating namespace "kuttl-test-bright-herring"
logger.go:42: 11:23:48 | demand-backup-cloud/0-minio-secret | starting test step 0-minio-secret
logger.go:42: 11:23:48 | demand-backup-cloud/0-minio-secret | Secret:kuttl-test-bright-herring/minio-secret created
logger.go:42: 11:23:49 | demand-backup-cloud/0-minio-secret | test step completed 0-minio-secret
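Step 0 applies nothing but a plain Kubernetes Secret named minio-secret before any script runs. For orientation, a minimal sketch of such a secret applied the same way the test does it; the key names and dummy values below are assumptions for illustration, not the contents of the repository's conf/minio-secret.yml:

    # hypothetical stand-in for e2e-tests/conf/minio-secret.yml
    kubectl -n kuttl-test-bright-herring apply -f - <<'EOF'
    apiVersion: v1
    kind: Secret
    metadata:
      name: minio-secret
    type: Opaque
    stringData:
      AWS_ACCESS_KEY_ID: some-access-key        # assumed key name
      AWS_SECRET_ACCESS_KEY: some-secret-key    # assumed key name
    EOF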
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | starting test step 1-deploy-operator
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
init_temp_dir # do this only in the first TestStep
apply_s3_storage_secrets
deploy_operator
deploy_tls_cluster_secrets
deploy_client]
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | + source ../../functions
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ realpath ../../..
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | ++++ pwd
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/tests/demand-backup-cloud
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | ++ test_name=demand-backup-cloud
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/vars.sh
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | ++++ git rev-parse --abbrev-ref HEAD
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export GIT_BRANCH=PR-1251
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ GIT_BRANCH=PR-1251
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export VERSION=PR-1251-a7b88ac1
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ VERSION=PR-1251-a7b88ac1
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ [[ -z 8.4 ]]
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export MYSQL_VERSION=8.4
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ MYSQL_VERSION=8.4
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export PMM_SERVER_VERSION=1.4.3
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ PMM_SERVER_VERSION=1.4.3
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export CERT_MANAGER_VER=1.19.1
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ CERT_MANAGER_VER=1.19.1
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export MINIO_VER=5.4.0
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ MINIO_VER=5.4.0
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export CHAOS_MESH_VER=2.7.2
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ CHAOS_MESH_VER=2.7.2
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ export VAULT_VER=0.16.1
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ VAULT_VER=0.16.1
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | ++++ which gdate
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | ++++ which date
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ date=/usr/sbin/date
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ oc get projects
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ :
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ kubectl get nodes
logger.go:42: 11:23:49 | demand-backup-cloud/1-deploy-operator | +++ grep '^minikube'
logger.go:42: 11:23:50 | demand-backup-cloud/1-deploy-operator | +++ which gsed
logger.go:42: 11:23:50 | demand-backup-cloud/1-deploy-operator | which: no gsed in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 11:23:50 | demand-backup-cloud/1-deploy-operator | +++ which sed
logger.go:42: 11:23:50 | demand-backup-cloud/1-deploy-operator | ++ sed=/usr/sbin/sed
logger.go:42: 11:23:50 | demand-backup-cloud/1-deploy-operator | ++ oc get projects
logger.go:42: 11:23:50 | demand-backup-cloud/1-deploy-operator | +++ kubectl version -o json
logger.go:42: 11:23:50 | demand-backup-cloud/1-deploy-operator | +++ jq -r .serverVersion.gitVersion
logger.go:42: 11:23:50 | demand-backup-cloud/1-deploy-operator | +++ grep '\-eks\-'
logger.go:42: 11:23:50 | demand-backup-cloud/1-deploy-operator | grep: warning: stray \ before -
logger.go:42: 11:23:51 | demand-backup-cloud/1-deploy-operator | Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1
logger.go:42: 11:23:51 | demand-backup-cloud/1-deploy-operator | ++ '[' ']'
logger.go:42: 11:23:51 | demand-backup-cloud/1-deploy-operator | ++ EKS=0
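Everything up to EKS=0 is host probing done by the sourced functions file: prefer the GNU g-prefixed tools when present (macOS), fall back to the system date/sed, use a failing oc get projects to detect OpenShift, grep node names for minikube, and grep the API server's gitVersion for an -eks- suffix to detect EKS. A condensed sketch of that detection pattern, under the assumption that only these tool names matter:

    # prefer GNU tools when installed under the g-prefix, else use the system ones
    date_bin=$(command -v gdate || command -v date)
    sed_bin=$(command -v gsed || command -v sed)
    # `oc get projects` only succeeds against an OpenShift API server
    oc get projects >/dev/null 2>&1 || true
    # EKS embeds an -eks- vendor suffix in the server git version
    if kubectl version -o json | jq -r .serverVersion.gitVersion | grep -q -- '-eks-'; then
        EKS=1
    else
        EKS=0
    fi

Passing the pattern after -- instead of backslash-escaping the dashes also avoids the "stray \ before -" warning grep prints above.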
logger.go:42: 11:23:51 | demand-backup-cloud/1-deploy-operator | + init_temp_dir
logger.go:42: 11:23:51 | demand-backup-cloud/1-deploy-operator | + rm -rf /tmp/kuttl/ps/demand-backup-cloud
logger.go:42: 11:23:51 | demand-backup-cloud/1-deploy-operator | + mkdir -p /tmp/kuttl/ps/demand-backup-cloud
logger.go:42: 11:23:51 | demand-backup-cloud/1-deploy-operator | + apply_s3_storage_secrets
logger.go:42: 11:23:51 | demand-backup-cloud/1-deploy-operator | + apply_minio_secret
logger.go:42: 11:23:51 | demand-backup-cloud/1-deploy-operator | + kubectl -n kuttl-test-bright-herring apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf/minio-secret.yml
logger.go:42: 11:23:52 | demand-backup-cloud/1-deploy-operator | Warning: resource secrets/minio-secret is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 11:23:52 | demand-backup-cloud/1-deploy-operator | secret/minio-secret configured
logger.go:42: 11:23:52 | demand-backup-cloud/1-deploy-operator | + kubectl -n kuttl-test-bright-herring apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf/cloud-secret.yml
logger.go:42: 11:23:54 | demand-backup-cloud/1-deploy-operator | secret/aws-s3-secret created
logger.go:42: 11:23:54 | demand-backup-cloud/1-deploy-operator | secret/do-spaces-secret created
logger.go:42: 11:23:54 | demand-backup-cloud/1-deploy-operator | secret/gcp-cs-secret created
logger.go:42: 11:23:55 | demand-backup-cloud/1-deploy-operator | secret/azure-secret created
logger.go:42: 11:23:55 | demand-backup-cloud/1-deploy-operator | + deploy_operator
logger.go:42: 11:23:55 | demand-backup-cloud/1-deploy-operator | + destroy_operator
logger.go:42: 11:23:55 | demand-backup-cloud/1-deploy-operator | + kubectl -n ps-operator delete deployment percona-server-mysql-operator --force --grace-period=0
logger.go:42: 11:23:55 | demand-backup-cloud/1-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
logger.go:42: 11:23:55 | demand-backup-cloud/1-deploy-operator | Error from server (NotFound): deployments.apps "percona-server-mysql-operator" not found
logger.go:42: 11:23:55 | demand-backup-cloud/1-deploy-operator | + true
logger.go:42: 11:23:55 | demand-backup-cloud/1-deploy-operator | + [[ -n ps-operator ]]
logger.go:42: 11:23:55 | demand-backup-cloud/1-deploy-operator | + kubectl delete namespace ps-operator --force --grace-period=0
logger.go:42: 11:23:55 | demand-backup-cloud/1-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
logger.go:42: 11:23:56 | demand-backup-cloud/1-deploy-operator | Error from server (NotFound): namespaces "ps-operator" not found
logger.go:42: 11:23:56 | demand-backup-cloud/1-deploy-operator | + true
logger.go:42: 11:23:56 | demand-backup-cloud/1-deploy-operator | + [[ -n ps-operator ]]
logger.go:42: 11:23:56 | demand-backup-cloud/1-deploy-operator | + create_namespace ps-operator
logger.go:42: 11:23:56 | demand-backup-cloud/1-deploy-operator | + local namespace=ps-operator
logger.go:42: 11:23:56 | demand-backup-cloud/1-deploy-operator | + [[ -n '' ]]
logger.go:42: 11:23:56 | demand-backup-cloud/1-deploy-operator | + kubectl delete namespace ps-operator --ignore-not-found
logger.go:42: 11:23:56 | demand-backup-cloud/1-deploy-operator | + kubectl wait --for=delete namespace ps-operator
logger.go:42: 11:23:57 | demand-backup-cloud/1-deploy-operator | + kubectl create namespace ps-operator
logger.go:42: 11:23:58 | demand-backup-cloud/1-deploy-operator | namespace/ps-operator created
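The destroy-then-create dance above is deliberately idempotent: both NotFound errors are swallowed (the "+ true" lines are the script's || true), and kubectl wait --for=delete blocks until the old namespace is actually gone before it is recreated. The same pattern in isolation, a sketch built from the exact commands in the log:

    ns=ps-operator
    # force-delete leftovers from a previous run; NotFound is fine
    kubectl -n "$ns" delete deployment percona-server-mysql-operator --force --grace-period=0 || true
    kubectl delete namespace "$ns" --force --grace-period=0 || true
    kubectl delete namespace "$ns" --ignore-not-found
    # block until the namespace object is really gone, then recreate it
    kubectl wait --for=delete namespace "$ns" || true
    kubectl create namespace "$ns"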
logger.go:42: 11:23:58 | demand-backup-cloud/1-deploy-operator | + apply_crd
logger.go:42: 11:23:58 | demand-backup-cloud/1-deploy-operator | + kubectl -n ps-operator apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy/crd.yaml
logger.go:42: 11:23:59 | demand-backup-cloud/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlbackups.ps.percona.com serverside-applied
logger.go:42: 11:23:59 | demand-backup-cloud/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlrestores.ps.percona.com serverside-applied
logger.go:42: 11:24:00 | demand-backup-cloud/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqls.ps.percona.com serverside-applied
logger.go:42: 11:24:00 | demand-backup-cloud/1-deploy-operator | + apply_rbac
logger.go:42: 11:24:00 | demand-backup-cloud/1-deploy-operator | + local rbac_file
logger.go:42: 11:24:00 | demand-backup-cloud/1-deploy-operator | + '[' -n ps-operator ']'
logger.go:42: 11:24:00 | demand-backup-cloud/1-deploy-operator | + rbac_file=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy/cw-rbac.yaml
logger.go:42: 11:24:00 | demand-backup-cloud/1-deploy-operator | + kubectl -n ps-operator apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy/cw-rbac.yaml
logger.go:42: 11:24:02 | demand-backup-cloud/1-deploy-operator | serviceaccount/percona-server-mysql-operator created
logger.go:42: 11:24:02 | demand-backup-cloud/1-deploy-operator | role.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created
logger.go:42: 11:24:03 | demand-backup-cloud/1-deploy-operator | clusterrole.rbac.authorization.k8s.io/percona-server-mysql-operator created
logger.go:42: 11:24:03 | demand-backup-cloud/1-deploy-operator | rolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created
logger.go:42: 11:24:03 | demand-backup-cloud/1-deploy-operator | clusterrolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator created
logger.go:42: 11:24:03 | demand-backup-cloud/1-deploy-operator | + local operator_file
logger.go:42: 11:24:03 | demand-backup-cloud/1-deploy-operator | + '[' -n ps-operator ']'
logger.go:42: 11:24:03 | demand-backup-cloud/1-deploy-operator | + operator_file=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy/cw-operator.yaml
logger.go:42: 11:24:03 | demand-backup-cloud/1-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="DISABLE_TELEMETRY").value) = "true"'
logger.go:42: 11:24:03 | demand-backup-cloud/1-deploy-operator | ++ printf 'select(documentIndex==1).spec.template.spec.containers[0].image="%s"' perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1
logger.go:42: 11:24:03 | demand-backup-cloud/1-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="LOG_LEVEL").value) = "VERBOSE"'
logger.go:42: 11:24:03 | demand-backup-cloud/1-deploy-operator | + kubectl -n ps-operator apply -f -
logger.go:42: 11:24:03 | demand-backup-cloud/1-deploy-operator | + yq eval 'select(documentIndex==1).spec.template.spec.containers[0].image="perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1"' /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy/cw-operator.yaml
logger.go:42: 11:24:05 | demand-backup-cloud/1-deploy-operator | configmap/percona-server-mysql-operator-config created
logger.go:42: 11:24:06 | demand-backup-cloud/1-deploy-operator | deployment.apps/percona-server-mysql-operator created
logger.go:42: 11:24:06 | demand-backup-cloud/1-deploy-operator | + deploy_tls_cluster_secrets
logger.go:42: 11:24:06 | demand-backup-cloud/1-deploy-operator | + kubectl -n kuttl-test-bright-herring apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf/ssl-secret.yaml
logger.go:42: 11:24:07 | demand-backup-cloud/1-deploy-operator | secret/test-ssl created
logger.go:42: 11:24:07 | demand-backup-cloud/1-deploy-operator | + deploy_client
logger.go:42: 11:24:07 | demand-backup-cloud/1-deploy-operator | + kubectl -n kuttl-test-bright-herring apply -f -
logger.go:42: 11:24:07 | demand-backup-cloud/1-deploy-operator | ++ printf '.spec.containers[0].image="%s"' perconalab/percona-server-mysql-operator:main-psmysql8.4
logger.go:42: 11:24:07 | demand-backup-cloud/1-deploy-operator | + yq eval '.spec.containers[0].image="perconalab/percona-server-mysql-operator:main-psmysql8.4"' /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf/client.yaml
logger.go:42: 11:24:08 | demand-backup-cloud/1-deploy-operator | pod/mysql-client created
logger.go:42: 11:24:09 | demand-backup-cloud/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 11:24:09 | demand-backup-cloud/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 11:24:10 | demand-backup-cloud/1-deploy-operator | ASSERT FAIL Resource(s) not found.
logger.go:42: 11:24:11 | demand-backup-cloud/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 11:24:11 | demand-backup-cloud/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 11:24:12 | demand-backup-cloud/1-deploy-operator | ASSERT FAIL Resource(s) not found.
logger.go:42: 11:24:13 | demand-backup-cloud/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 11:24:13 | demand-backup-cloud/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 11:24:13 | demand-backup-cloud/1-deploy-operator | ASSERT FAIL Resource(s) not found.
logger.go:42: 11:24:15 | demand-backup-cloud/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 11:24:15 | demand-backup-cloud/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 11:24:15 | demand-backup-cloud/1-deploy-operator | ASSERT FAIL Resource(s) not found.
logger.go:42: 11:24:17 | demand-backup-cloud/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 11:24:17 | demand-backup-cloud/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 11:24:17 | demand-backup-cloud/1-deploy-operator | ASSERT FAIL Resource(s) not found.
logger.go:42: 11:24:19 | demand-backup-cloud/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 11:24:19 | demand-backup-cloud/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 11:24:19 | demand-backup-cloud/1-deploy-operator | ASSERT FAIL Resource(s) not found.
logger.go:42: 11:24:21 | demand-backup-cloud/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 11:24:21 | demand-backup-cloud/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 11:24:21 | demand-backup-cloud/1-deploy-operator | ASSERT FAIL Resource(s) not found.
logger.go:42: 11:24:23 | demand-backup-cloud/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 11:24:23 | demand-backup-cloud/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 11:24:23 | demand-backup-cloud/1-deploy-operator | ASSERT FAIL Resource(s) not found.
logger.go:42: 11:24:25 | demand-backup-cloud/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 11:24:25 | demand-backup-cloud/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 11:24:25 | demand-backup-cloud/1-deploy-operator | ASSERT FAIL Resource(s) not found.
logger.go:42: 11:24:27 | demand-backup-cloud/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 11:24:27 | demand-backup-cloud/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 11:24:27 | demand-backup-cloud/1-deploy-operator | ASSERT FAIL Resource(s) not found.
logger.go:42: 11:24:29 | demand-backup-cloud/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 11:24:29 | demand-backup-cloud/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 11:24:29 | demand-backup-cloud/1-deploy-operator | ASSERT FAIL Resource(s) not found.
logger.go:42: 11:24:30 | demand-backup-cloud/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 11:24:31 | demand-backup-cloud/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 11:24:31 | demand-backup-cloud/1-deploy-operator | ASSERT FAIL Resource(s) not found.
logger.go:42: 11:24:32 | demand-backup-cloud/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 11:24:32 | demand-backup-cloud/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 11:24:33 | demand-backup-cloud/1-deploy-operator | ASSERT FAIL Resource(s) not found.
logger.go:42: 11:24:34 | demand-backup-cloud/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 11:24:34 | demand-backup-cloud/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 11:24:35 | demand-backup-cloud/1-deploy-operator | INFO Found 1 resource(s).
logger.go:42: 11:24:35 | demand-backup-cloud/1-deploy-operator | NAME NAMESPACE COL0
logger.go:42: 11:24:35 | demand-backup-cloud/1-deploy-operator | percona-server-mysql-operator ps-operator 1
logger.go:42: 11:24:35 | demand-backup-cloud/1-deploy-operator | ASSERT PASS
logger.go:42: 11:24:35 | demand-backup-cloud/1-deploy-operator | test step completed 1-deploy-operator
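The assert for this step shells out to the kubectl assert plugin in a retry loop, polling every couple of seconds until the deployment reports readyReplicas=1; the 26 seconds of FAILs above are presumably the operator image being pulled and the pod starting. With stock kubectl the same gate can be expressed directly; a sketch:

    # equivalent readiness gate without the kubectl-assert plugin
    kubectl -n ps-operator wait deployment/percona-server-mysql-operator \
        --for=condition=Available --timeout=300s
    # or poll the exact field the assert inspects
    until [ "$(kubectl -n ps-operator get deployment percona-server-mysql-operator \
        -o jsonpath='{.status.readyReplicas}')" = "1" ]; do
        sleep 2
    done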
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | starting test step 2-create-cluster
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
get_cr \
| yq eval '.spec.mysql.clusterType="async"' - \
| yq eval ".spec.mysql.size=3" - \
| yq eval ".spec.proxy.haproxy.enabled=true" - \
| yq eval ".spec.proxy.haproxy.size=3" - \
| yq eval ".spec.orchestrator.enabled=true" - \
| yq eval ".spec.orchestrator.size=3" - \
| yq eval '.spec.backup.storages.aws-s3.type="s3"' - \
| yq eval ".spec.backup.storages.aws-s3.verifyTLS=true" - \
| yq eval '.spec.backup.storages.aws-s3.s3.bucket="operator-testing"' - \
| yq eval '.spec.backup.storages.aws-s3.s3.credentialsSecret="aws-s3-secret"' - \
| yq eval '.spec.backup.storages.aws-s3.s3.region="us-east-1"' - \
| yq eval '.spec.backup.storages.aws-s3.s3.prefix="ps"' - \
| yq eval '.spec.backup.storages.gcp-cs.type="gcs"' - \
| yq eval ".spec.backup.storages.gcp-cs.verifyTLS=true" - \
| yq eval '.spec.backup.storages.gcp-cs.gcs.bucket="operator-testing"' - \
| yq eval '.spec.backup.storages.gcp-cs.gcs.credentialsSecret="gcp-cs-secret"' - \
| yq eval '.spec.backup.storages.gcp-cs.gcs.endpointUrl="https://storage.googleapis.com"' - \
| yq eval '.spec.backup.storages.gcp-cs.gcs.prefix="ps"' - \
| yq eval '.spec.backup.storages.azure-blob.type="azure"' - \
| yq eval ".spec.backup.storages.azure-blob.verifyTLS=true" - \
| yq eval '.spec.backup.storages.azure-blob.azure.container="operator-testing"' - \
| yq eval '.spec.backup.storages.azure-blob.azure.credentialsSecret="azure-secret"' - \
| yq eval '.spec.backup.storages.azure-blob.azure.prefix="ps"' - \
| kubectl -n "${NAMESPACE}" apply -f -]
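Step 2 never edits a file on disk: get_cr emits deploy/cr.yaml with the metadata and images already rewritten, and the chained yq evals graft three backup storages (aws-s3, gcp-cs, azure-blob) onto .spec.backup.storages before the stream hits kubectl apply. Once the object exists, the result of the pipeline can be read back the same way the test queries the CR elsewhere; expected shape shown as comments, with values copied from the pipeline above:

    kubectl -n "${NAMESPACE}" get ps demand-backup-cloud -o yaml | yq eval '.spec.backup.storages' -
    # aws-s3:     type: s3,    verifyTLS: true, s3:    {bucket: operator-testing, credentialsSecret: aws-s3-secret, region: us-east-1, prefix: ps}
    # gcp-cs:     type: gcs,   verifyTLS: true, gcs:   {bucket: operator-testing, credentialsSecret: gcp-cs-secret, endpointUrl: https://storage.googleapis.com, prefix: ps}
    # azure-blob: type: azure, verifyTLS: true, azure: {container: operator-testing, credentialsSecret: azure-secret, prefix: ps}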
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | + source ../../functions
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ realpath ../../..
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | ++++ pwd
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/tests/demand-backup-cloud
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | ++ test_name=demand-backup-cloud
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/vars.sh
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | ++++ git rev-parse --abbrev-ref HEAD
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export GIT_BRANCH=PR-1251
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ GIT_BRANCH=PR-1251
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export VERSION=PR-1251-a7b88ac1
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ VERSION=PR-1251-a7b88ac1
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ [[ -z 8.4 ]]
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export MYSQL_VERSION=8.4
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ MYSQL_VERSION=8.4
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export PMM_SERVER_VERSION=1.4.3
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ PMM_SERVER_VERSION=1.4.3
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export CERT_MANAGER_VER=1.19.1
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ CERT_MANAGER_VER=1.19.1
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export MINIO_VER=5.4.0
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ MINIO_VER=5.4.0
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export CHAOS_MESH_VER=2.7.2
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ CHAOS_MESH_VER=2.7.2
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ export VAULT_VER=0.16.1
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ VAULT_VER=0.16.1
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | ++++ which gdate
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | ++++ which date
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ date=/usr/sbin/date
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ oc get projects
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ :
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ kubectl get nodes
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ grep '^minikube'
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ which gsed
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | which: no gsed in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ which sed
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | ++ sed=/usr/sbin/sed
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | ++ oc get projects
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ kubectl version -o json
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ jq -r .serverVersion.gitVersion
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | +++ grep '\-eks\-'
logger.go:42: 11:24:35 | demand-backup-cloud/2-create-cluster | grep: warning: stray \ before -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | ++ '[' ']'
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | ++ EKS=0
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + get_cr
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + local name_suffix=
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + local image_mysql=perconalab/percona-server-mysql-operator:main-psmysql8.4
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + local image_backup=perconalab/percona-server-mysql-operator:main-backup8.4
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + local image_orchestrator=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + local image_router=perconalab/percona-server-mysql-operator:main-router8.4
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + local image_toolkit=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + local image_haproxy=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + local image_pmm_client=perconalab/pmm-client:3-dev-latest
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + local cr_file=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy/cr.yaml
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.mysql.clusterType="async"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval .spec.mysql.size=3 -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval 'del(.spec.secretsName)' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval .spec.proxy.haproxy.enabled=true -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | ++ printf '.metadata.name="%s"' demand-backup-cloud
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.sslSecretName="test-ssl"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.mysql.clusterType="async"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval .spec.mysql.gracePeriod=30 -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | ++ printf '.spec.initContainer.image="%s"' perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.metadata.name="demand-backup-cloud"' /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy/cr.yaml
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval .spec.orchestrator.enabled=true -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.initContainer.image="perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.upgradeOptions.apply="disabled"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval .spec.orchestrator.enabled=true -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval .spec.proxy.haproxy.size=3 -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup8.4
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval .spec.orchestrator.size=3 -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router8.4
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | ++ printf '.spec.proxy.haproxy.image="%s"' perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + '[' -n '' ']'
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.type="s3"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router8.4"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql8.4
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.bucket="operator-testing"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup8.4"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval .spec.backup.storages.aws-s3.verifyTLS=true -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql8.4"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:3-dev-latest
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.credentialsSecret="aws-s3-secret"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.region="us-east-1"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.prefix="ps"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.pmm.image="perconalab/pmm-client:3-dev-latest"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.type="gcs"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval .spec.backup.storages.gcp-cs.verifyTLS=true -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.bucket="operator-testing"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.endpointUrl="https://storage.googleapis.com"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.credentialsSecret="gcp-cs-secret"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.prefix="ps"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.type="azure"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.container="operator-testing"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval .spec.backup.storages.azure-blob.verifyTLS=true -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.credentialsSecret="azure-secret"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.prefix="ps"' -
logger.go:42: 11:24:36 | demand-backup-cloud/2-create-cluster | + kubectl -n kuttl-test-bright-herring apply -f -
logger.go:42: 11:24:38 | demand-backup-cloud/2-create-cluster | perconaservermysql.ps.percona.com/demand-backup-cloud created
logger.go:42: 11:29:19 | demand-backup-cloud/2-create-cluster | test step completed 2-create-cluster
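Note the gap between 11:24:38 and 11:29:19: after the apply, kuttl sits on this step's assert files until the async cluster (3 MySQL, 3 HAProxy, 3 orchestrator pods) is fully up, which takes close to five minutes here. The same state can be watched by hand; the status field names below are assumptions about the CR's schema, not something this log confirms:

    # .status.state and the per-component ready counts are assumed field names
    kubectl -n kuttl-test-bright-herring get ps demand-backup-cloud \
        -o jsonpath='{.status.state} mysql={.status.mysql.ready} haproxy={.status.haproxy.ready} orchestrator={.status.orchestrator.ready}{"\n"}'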
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | starting test step 3-write-data
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
run_mysql \
"CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" \
"-h $(get_haproxy_svc $(get_cluster_name))"
run_mysql \
"INSERT myDB.myTable (id) VALUES (100500)" \
"-h $(get_haproxy_svc $(get_cluster_name))"]
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | + source ../../functions
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ realpath ../../..
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | ++++ pwd
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/tests/demand-backup-cloud
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | ++ test_name=demand-backup-cloud
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/vars.sh
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | ++++ git rev-parse --abbrev-ref HEAD
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export GIT_BRANCH=PR-1251
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ GIT_BRANCH=PR-1251
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export VERSION=PR-1251-a7b88ac1
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ VERSION=PR-1251-a7b88ac1
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ [[ -z 8.4 ]]
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export MYSQL_VERSION=8.4
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ MYSQL_VERSION=8.4
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export PMM_SERVER_VERSION=1.4.3
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ PMM_SERVER_VERSION=1.4.3
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export CERT_MANAGER_VER=1.19.1
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ CERT_MANAGER_VER=1.19.1
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export MINIO_VER=5.4.0
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ MINIO_VER=5.4.0
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export CHAOS_MESH_VER=2.7.2
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ CHAOS_MESH_VER=2.7.2
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ export VAULT_VER=0.16.1
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ VAULT_VER=0.16.1
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | ++++ which gdate
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | ++++ which date
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ date=/usr/sbin/date
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ oc get projects
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ :
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ kubectl get nodes
logger.go:42: 11:29:19 | demand-backup-cloud/3-write-data | +++ grep '^minikube'
logger.go:42: 11:29:20 | demand-backup-cloud/3-write-data | +++ which gsed
logger.go:42: 11:29:20 | demand-backup-cloud/3-write-data | which: no gsed in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 11:29:20 | demand-backup-cloud/3-write-data | +++ which sed
logger.go:42: 11:29:20 | demand-backup-cloud/3-write-data | ++ sed=/usr/sbin/sed
logger.go:42: 11:29:20 | demand-backup-cloud/3-write-data | ++ oc get projects
logger.go:42: 11:29:20 | demand-backup-cloud/3-write-data | +++ kubectl version -o json
logger.go:42: 11:29:20 | demand-backup-cloud/3-write-data | +++ jq -r .serverVersion.gitVersion
logger.go:42: 11:29:20 | demand-backup-cloud/3-write-data | +++ grep '\-eks\-'
logger.go:42: 11:29:20 | demand-backup-cloud/3-write-data | grep: warning: stray \ before -
logger.go:42: 11:29:20 | demand-backup-cloud/3-write-data | Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1
logger.go:42: 11:29:20 | demand-backup-cloud/3-write-data | ++ '[' ']'
logger.go:42: 11:29:20 | demand-backup-cloud/3-write-data | ++ EKS=0
logger.go:42: 11:29:20 | demand-backup-cloud/3-write-data | +++ get_cluster_name
logger.go:42: 11:29:20 | demand-backup-cloud/3-write-data | +++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 11:29:21 | demand-backup-cloud/3-write-data | ++ get_haproxy_svc demand-backup-cloud
logger.go:42: 11:29:21 | demand-backup-cloud/3-write-data | ++ local cluster=demand-backup-cloud
logger.go:42: 11:29:21 | demand-backup-cloud/3-write-data | ++ echo demand-backup-cloud-haproxy
logger.go:42: 11:29:21 | demand-backup-cloud/3-write-data | + run_mysql 'CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)' '-h demand-backup-cloud-haproxy'
logger.go:42: 11:29:21 | demand-backup-cloud/3-write-data | + local 'command=CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)'
logger.go:42: 11:29:21 | demand-backup-cloud/3-write-data | + local 'host=-h demand-backup-cloud-haproxy'
logger.go:42: 11:29:21 | demand-backup-cloud/3-write-data | ++ get_user_pass root
logger.go:42: 11:29:21 | demand-backup-cloud/3-write-data | ++ local user=root
logger.go:42: 11:29:21 | demand-backup-cloud/3-write-data | +++ get_cluster_name
logger.go:42: 11:29:21 | demand-backup-cloud/3-write-data | +++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 11:29:21 | demand-backup-cloud/3-write-data | ++ local secret=demand-backup-cloud-secrets
logger.go:42: 11:29:21 | demand-backup-cloud/3-write-data | ++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}'
logger.go:42: 11:29:21 | demand-backup-cloud/3-write-data | ++ base64 --decode
logger.go:42: 11:29:22 | demand-backup-cloud/3-write-data | + local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\'''
logger.go:42: 11:29:22 | demand-backup-cloud/3-write-data | + local pod=
logger.go:42: 11:29:22 | demand-backup-cloud/3-write-data | ++ get_client_pod
logger.go:42: 11:29:22 | demand-backup-cloud/3-write-data | ++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 11:29:22 | demand-backup-cloud/3-write-data | + client_pod=mysql-client
logger.go:42: 11:29:22 | demand-backup-cloud/3-write-data | + wait_pod mysql-client
logger.go:42: 11:29:22 | demand-backup-cloud/3-write-data | + local pod=mysql-client
logger.go:42: 11:29:22 | demand-backup-cloud/3-write-data | + local ns=kuttl-test-bright-herring
logger.go:42: 11:29:22 | demand-backup-cloud/3-write-data | + set +o xtrace
logger.go:42: 11:29:23 | demand-backup-cloud/3-write-data | mysql-clienttrue
logger.go:42: 11:29:23 | demand-backup-cloud/3-write-data | + kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" | mysql -sN -h demand-backup-cloud-haproxy -uroot -p'\''QAL0L]~5u?eNJD,-'\'''
logger.go:42: 11:29:23 | demand-backup-cloud/3-write-data | + /usr/sbin/sed -e 's/mysql: //'
logger.go:42: 11:29:23 | demand-backup-cloud/3-write-data | + grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 11:29:24 | demand-backup-cloud/3-write-data | + :
logger.go:42: 11:29:24 | demand-backup-cloud/3-write-data | +++ get_cluster_name
logger.go:42: 11:29:24 | demand-backup-cloud/3-write-data | +++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 11:29:24 | demand-backup-cloud/3-write-data | ++ get_haproxy_svc demand-backup-cloud
logger.go:42: 11:29:24 | demand-backup-cloud/3-write-data | ++ local cluster=demand-backup-cloud
logger.go:42: 11:29:24 | demand-backup-cloud/3-write-data | ++ echo demand-backup-cloud-haproxy
logger.go:42: 11:29:24 | demand-backup-cloud/3-write-data | + run_mysql 'INSERT myDB.myTable (id) VALUES (100500)' '-h demand-backup-cloud-haproxy'
logger.go:42: 11:29:24 | demand-backup-cloud/3-write-data | + local 'command=INSERT myDB.myTable (id) VALUES (100500)'
logger.go:42: 11:29:24 | demand-backup-cloud/3-write-data | + local 'host=-h demand-backup-cloud-haproxy'
logger.go:42: 11:29:24 | demand-backup-cloud/3-write-data | ++ get_user_pass root
logger.go:42: 11:29:24 | demand-backup-cloud/3-write-data | ++ local user=root
logger.go:42: 11:29:24 | demand-backup-cloud/3-write-data | +++ get_cluster_name
logger.go:42: 11:29:24 | demand-backup-cloud/3-write-data | +++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 11:29:25 | demand-backup-cloud/3-write-data | ++ local secret=demand-backup-cloud-secrets
logger.go:42: 11:29:25 | demand-backup-cloud/3-write-data | ++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}'
logger.go:42: 11:29:25 | demand-backup-cloud/3-write-data | ++ base64 --decode
logger.go:42: 11:29:26 | demand-backup-cloud/3-write-data | + local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\'''
logger.go:42: 11:29:26 | demand-backup-cloud/3-write-data | + local pod=
logger.go:42: 11:29:26 | demand-backup-cloud/3-write-data | ++ get_client_pod
logger.go:42: 11:29:26 | demand-backup-cloud/3-write-data | ++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 11:29:26 | demand-backup-cloud/3-write-data | + client_pod=mysql-client
logger.go:42: 11:29:26 | demand-backup-cloud/3-write-data | + wait_pod mysql-client
logger.go:42: 11:29:26 | demand-backup-cloud/3-write-data | + local pod=mysql-client
logger.go:42: 11:29:26 | demand-backup-cloud/3-write-data | + local ns=kuttl-test-bright-herring
logger.go:42: 11:29:26 | demand-backup-cloud/3-write-data | + set +o xtrace
logger.go:42: 11:29:27 | demand-backup-cloud/3-write-data | mysql-clienttrue
logger.go:42: 11:29:27 | demand-backup-cloud/3-write-data | + kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "INSERT myDB.myTable (id) VALUES (100500)" | mysql -sN -h demand-backup-cloud-haproxy -uroot -p'\''QAL0L]~5u?eNJD,-'\'''
logger.go:42: 11:29:27 | demand-backup-cloud/3-write-data | + /usr/sbin/sed -e 's/mysql: //'
logger.go:42: 11:29:27 | demand-backup-cloud/3-write-data | + grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 11:29:28 | demand-backup-cloud/3-write-data | + :
logger.go:42: 11:29:28 | demand-backup-cloud/3-write-data | test step completed 3-write-data
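Both statements in step 3 go through the run_mysql helper, whose xtrace is what fills the log: it resolves the cluster name, reads the root password out of the <cluster>-secrets Secret, waits for the long-lived mysql-client pod, and pipes the SQL into mysql inside that pod, with the sed/grep pair only stripping the password warning. Condensed into a standalone sketch (a slight variation: this pipes the statement from outside the pod instead of printf-ing inside it):

    ns=kuttl-test-bright-herring
    pass=$(kubectl -n "$ns" get secret demand-backup-cloud-secrets \
        -o jsonpath='{.data.root}' | base64 --decode)
    printf '%s\n' 'INSERT myDB.myTable (id) VALUES (100500)' \
        | kubectl -n "$ns" exec -i mysql-client -- \
            mysql -sN -h demand-backup-cloud-haproxy -uroot -p"$pass" 2>&1 \
        | grep -v 'Using a password on the command line interface can be insecure.'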
command line interface can be insecure.' logger.go:42: 11:29:28 | demand-backup-cloud/3-write-data | + : logger.go:42: 11:29:28 | demand-backup-cloud/3-write-data | test step completed 3-write-data logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | starting test step 4-move-primary-before-backup logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | running command: [sh -c set -o errexit set -o xtrace source ../../functions primary_pod_from_label="$(get_primary_from_label)" kubectl delete pod -n ${NAMESPACE} ${primary_pod_from_label} wait_cluster_consistency_async "${test_name}" "3" "3" new_primary_pod_from_label="$(get_primary_from_label)" if [ "${primary_pod_from_label}" == "${new_primary_pod_from_label}" ]; then echo "Old (${primary_pod_from_label}) and new (${new_primary_pod_from_label}) primary are the same (the failover didn't happen)!" exit 1 fi] logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | + source ../../functions logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ realpath ../../.. logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | ++++ pwd logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/tests/demand-backup-cloud logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | ++ test_name=demand-backup-cloud logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/vars.sh logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export GIT_BRANCH=PR-1251 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ 
GIT_BRANCH=PR-1251 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export VERSION=PR-1251-a7b88ac1 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ VERSION=PR-1251-a7b88ac1 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ [[ -z 8.4 ]] logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export MYSQL_VERSION=8.4 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ MYSQL_VERSION=8.4 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export CERT_MANAGER_VER=1.19.1 logger.go:42: 
11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ CERT_MANAGER_VER=1.19.1 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export MINIO_VER=5.4.0 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ MINIO_VER=5.4.0 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ export VAULT_VER=0.16.1 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ VAULT_VER=0.16.1 logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | ++++ which gdate logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | ++++ which date logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ date=/usr/sbin/date logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ oc get projects logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ : logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ kubectl get nodes logger.go:42: 11:29:28 | demand-backup-cloud/4-move-primary-before-backup | +++ grep '^minikube' logger.go:42: 11:29:29 | demand-backup-cloud/4-move-primary-before-backup | +++ which gsed logger.go:42: 11:29:29 | demand-backup-cloud/4-move-primary-before-backup | which: no gsed in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:29:29 | demand-backup-cloud/4-move-primary-before-backup | +++ which sed logger.go:42: 11:29:29 | demand-backup-cloud/4-move-primary-before-backup | ++ sed=/usr/sbin/sed logger.go:42: 11:29:29 | demand-backup-cloud/4-move-primary-before-backup | ++ oc get projects logger.go:42: 11:29:29 | demand-backup-cloud/4-move-primary-before-backup | +++ kubectl version -o json logger.go:42: 11:29:29 | demand-backup-cloud/4-move-primary-before-backup | +++ jq -r .serverVersion.gitVersion logger.go:42: 11:29:29 | demand-backup-cloud/4-move-primary-before-backup | +++ grep '\-eks\-' logger.go:42: 11:29:29 | demand-backup-cloud/4-move-primary-before-backup | grep: warning: stray \ before - logger.go:42: 11:29:29 | demand-backup-cloud/4-move-primary-before-backup | Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 logger.go:42: 11:29:29 | demand-backup-cloud/4-move-primary-before-backup | ++ '[' ']' logger.go:42: 11:29:29 | demand-backup-cloud/4-move-primary-before-backup | ++ EKS=0 logger.go:42: 11:29:29 | demand-backup-cloud/4-move-primary-before-backup | ++ get_primary_from_label logger.go:42: 11:29:29 | demand-backup-cloud/4-move-primary-before-backup | ++ kubectl -n kuttl-test-bright-herring get pods -l mysql.percona.com/primary=true '-ojsonpath={.items[0].metadata.name}' logger.go:42: 11:29:30 | demand-backup-cloud/4-move-primary-before-backup | + primary_pod_from_label=demand-backup-cloud-mysql-0 logger.go:42: 11:29:30 | demand-backup-cloud/4-move-primary-before-backup | + kubectl delete pod -n 
kuttl-test-bright-herring demand-backup-cloud-mysql-0 logger.go:42: 11:29:30 | demand-backup-cloud/4-move-primary-before-backup | pod "demand-backup-cloud-mysql-0" deleted from kuttl-test-bright-herring namespace logger.go:42: 11:29:50 | demand-backup-cloud/4-move-primary-before-backup | + wait_cluster_consistency_async demand-backup-cloud 3 3 logger.go:42: 11:29:50 | demand-backup-cloud/4-move-primary-before-backup | + local cluster_name=demand-backup-cloud logger.go:42: 11:29:50 | demand-backup-cloud/4-move-primary-before-backup | + local cluster_size=3 logger.go:42: 11:29:50 | demand-backup-cloud/4-move-primary-before-backup | + local orc_size=3 logger.go:42: 11:29:50 | demand-backup-cloud/4-move-primary-before-backup | + '[' -z 3 ']' logger.go:42: 11:29:50 | demand-backup-cloud/4-move-primary-before-backup | + sleep 7 logger.go:42: 11:29:57 | demand-backup-cloud/4-move-primary-before-backup | ++ kubectl get ps demand-backup-cloud -n kuttl-test-bright-herring -o 'jsonpath={.status.mysql.state}' logger.go:42: 11:29:58 | demand-backup-cloud/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]] logger.go:42: 11:29:58 | demand-backup-cloud/4-move-primary-before-backup | + echo 'waiting for cluster readiness (async)' logger.go:42: 11:29:58 | demand-backup-cloud/4-move-primary-before-backup | waiting for cluster readiness (async) logger.go:42: 11:29:58 | demand-backup-cloud/4-move-primary-before-backup | + sleep 15 logger.go:42: 11:30:13 | demand-backup-cloud/4-move-primary-before-backup | ++ kubectl get ps demand-backup-cloud -n kuttl-test-bright-herring -o 'jsonpath={.status.mysql.state}' logger.go:42: 11:30:14 | demand-backup-cloud/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]] logger.go:42: 11:30:14 | demand-backup-cloud/4-move-primary-before-backup | + echo 'waiting for cluster readiness (async)' logger.go:42: 11:30:14 | demand-backup-cloud/4-move-primary-before-backup | waiting for cluster readiness (async) logger.go:42: 11:30:14 | demand-backup-cloud/4-move-primary-before-backup | + sleep 15 logger.go:42: 11:30:29 | demand-backup-cloud/4-move-primary-before-backup | ++ kubectl get ps demand-backup-cloud -n kuttl-test-bright-herring -o 'jsonpath={.status.mysql.state}' logger.go:42: 11:30:29 | demand-backup-cloud/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]] logger.go:42: 11:30:29 | demand-backup-cloud/4-move-primary-before-backup | + echo 'waiting for cluster readiness (async)' logger.go:42: 11:30:29 | demand-backup-cloud/4-move-primary-before-backup | waiting for cluster readiness (async) logger.go:42: 11:30:29 | demand-backup-cloud/4-move-primary-before-backup | + sleep 15 logger.go:42: 11:30:44 | demand-backup-cloud/4-move-primary-before-backup | ++ kubectl get ps demand-backup-cloud -n kuttl-test-bright-herring -o 'jsonpath={.status.mysql.state}' logger.go:42: 11:30:45 | demand-backup-cloud/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]] logger.go:42: 11:30:45 | demand-backup-cloud/4-move-primary-before-backup | + echo 'waiting for cluster readiness (async)' logger.go:42: 11:30:45 | demand-backup-cloud/4-move-primary-before-backup | waiting for cluster readiness (async) logger.go:42: 11:30:45 | demand-backup-cloud/4-move-primary-before-backup | + sleep 15 logger.go:42: 11:31:00 | demand-backup-cloud/4-move-primary-before-backup | ++ kubectl get ps demand-backup-cloud -n kuttl-test-bright-herring -o 'jsonpath={.status.mysql.state}' logger.go:42: 11:31:00 | demand-backup-cloud/4-move-primary-before-backup | + [[ 
ready == \r\e\a\d\y ]] logger.go:42: 11:31:00 | demand-backup-cloud/4-move-primary-before-backup | ++ kubectl get ps demand-backup-cloud -n kuttl-test-bright-herring -o 'jsonpath={.status.mysql.ready}' logger.go:42: 11:31:01 | demand-backup-cloud/4-move-primary-before-backup | + [[ 3 == \3 ]] logger.go:42: 11:31:01 | demand-backup-cloud/4-move-primary-before-backup | ++ kubectl get ps demand-backup-cloud -n kuttl-test-bright-herring -o 'jsonpath={.status.orchestrator.ready}' logger.go:42: 11:31:02 | demand-backup-cloud/4-move-primary-before-backup | + [[ 3 == \3 ]] logger.go:42: 11:31:02 | demand-backup-cloud/4-move-primary-before-backup | ++ kubectl get ps demand-backup-cloud -n kuttl-test-bright-herring -o 'jsonpath={.status.orchestrator.state}' logger.go:42: 11:31:02 | demand-backup-cloud/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]] logger.go:42: 11:31:02 | demand-backup-cloud/4-move-primary-before-backup | ++ kubectl get ps demand-backup-cloud -n kuttl-test-bright-herring -o 'jsonpath={.status.state}' logger.go:42: 11:31:03 | demand-backup-cloud/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]] logger.go:42: 11:31:03 | demand-backup-cloud/4-move-primary-before-backup | ++ get_primary_from_label logger.go:42: 11:31:03 | demand-backup-cloud/4-move-primary-before-backup | ++ kubectl -n kuttl-test-bright-herring get pods -l mysql.percona.com/primary=true '-ojsonpath={.items[0].metadata.name}' logger.go:42: 11:31:04 | demand-backup-cloud/4-move-primary-before-backup | + new_primary_pod_from_label=demand-backup-cloud-mysql-1 logger.go:42: 11:31:04 | demand-backup-cloud/4-move-primary-before-backup | + '[' demand-backup-cloud-mysql-0 == demand-backup-cloud-mysql-1 ']' logger.go:42: 11:31:04 | demand-backup-cloud/4-move-primary-before-backup | test step completed 4-move-primary-before-backup logger.go:42: 11:31:04 | demand-backup-cloud/5-create-backup-s3 | starting test step 5-create-backup-s3 logger.go:42: 11:31:05 | demand-backup-cloud/5-create-backup-s3 | PerconaServerMySQLBackup:kuttl-test-bright-herring/demand-backup-cloud-s3 created logger.go:42: 11:31:17 | demand-backup-cloud/5-create-backup-s3 | test step completed 5-create-backup-s3 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | starting test step 6-delete-data logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name))" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql") kubectl create configmap -n "${NAMESPACE}" 06-delete-data-s3-${i} --from-literal=data="${data}" done] logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | + source ../../functions logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ realpath ../../.. 
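Steps 4 and 5 above capture the failover-before-backup pattern this test exercises: read the current primary from the mysql.percona.com/primary=true pod label, delete that pod, poll the PerconaServerMySQL resource until it reports ready again, and fail the step if the primary did not move before the S3 backup is taken. The following is a minimal standalone sketch of that loop, assuming the namespace and cluster name from this run and collapsing the separate .status.mysql and .status.orchestrator checks visible in the trace into the overall .status.state:

    #!/bin/bash
    set -o errexit

    NAMESPACE=kuttl-test-bright-herring
    CLUSTER=demand-backup-cloud

    # the operator labels the current async primary pod
    primary=$(kubectl -n "$NAMESPACE" get pods -l mysql.percona.com/primary=true \
        -o jsonpath='{.items[0].metadata.name}')
    kubectl -n "$NAMESPACE" delete pod "$primary"

    # poll the custom resource until the cluster settles; the harness additionally
    # checks that .status.mysql.ready and .status.orchestrator.ready both reach 3
    until [[ $(kubectl -n "$NAMESPACE" get ps "$CLUSTER" \
        -o jsonpath='{.status.state}') == ready ]]; do
        echo 'waiting for cluster readiness (async)'
        sleep 15
    done

    new_primary=$(kubectl -n "$NAMESPACE" get pods -l mysql.percona.com/primary=true \
        -o jsonpath='{.items[0].metadata.name}')
    if [ "$primary" == "$new_primary" ]; then
        echo "failover did not happen: primary is still $primary"
        exit 1
    fi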
logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | ++++ pwd logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/tests/demand-backup-cloud logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | ++ test_name=demand-backup-cloud logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/vars.sh logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export GIT_BRANCH=PR-1251 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ GIT_BRANCH=PR-1251 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export VERSION=PR-1251-a7b88ac1 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ VERSION=PR-1251-a7b88ac1 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ [[ -z 8.4 ]] logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export MYSQL_VERSION=8.4 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ MYSQL_VERSION=8.4 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export 
IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export CERT_MANAGER_VER=1.19.1 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ CERT_MANAGER_VER=1.19.1 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export MINIO_VER=5.4.0 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ MINIO_VER=5.4.0 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ export VAULT_VER=0.16.1 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ VAULT_VER=0.16.1 logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | ++++ which gdate logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | ++++ which date logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ date=/usr/sbin/date logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ oc get projects logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ : logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ kubectl get nodes logger.go:42: 11:31:17 | demand-backup-cloud/6-delete-data | +++ grep '^minikube' logger.go:42: 11:31:18 | demand-backup-cloud/6-delete-data | +++ which gsed logger.go:42: 11:31:18 | demand-backup-cloud/6-delete-data | which: no gsed in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:31:18 | 
demand-backup-cloud/6-delete-data | +++ which sed logger.go:42: 11:31:18 | demand-backup-cloud/6-delete-data | ++ sed=/usr/sbin/sed logger.go:42: 11:31:18 | demand-backup-cloud/6-delete-data | ++ oc get projects logger.go:42: 11:31:18 | demand-backup-cloud/6-delete-data | +++ kubectl version -o json logger.go:42: 11:31:18 | demand-backup-cloud/6-delete-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 11:31:18 | demand-backup-cloud/6-delete-data | +++ grep '\-eks\-' logger.go:42: 11:31:18 | demand-backup-cloud/6-delete-data | grep: warning: stray \ before - logger.go:42: 11:31:19 | demand-backup-cloud/6-delete-data | Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 logger.go:42: 11:31:19 | demand-backup-cloud/6-delete-data | ++ '[' ']' logger.go:42: 11:31:19 | demand-backup-cloud/6-delete-data | ++ EKS=0 logger.go:42: 11:31:19 | demand-backup-cloud/6-delete-data | +++ get_cluster_name logger.go:42: 11:31:19 | demand-backup-cloud/6-delete-data | +++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:31:19 | demand-backup-cloud/6-delete-data | ++ get_haproxy_svc demand-backup-cloud logger.go:42: 11:31:19 | demand-backup-cloud/6-delete-data | ++ local cluster=demand-backup-cloud logger.go:42: 11:31:19 | demand-backup-cloud/6-delete-data | ++ echo demand-backup-cloud-haproxy logger.go:42: 11:31:19 | demand-backup-cloud/6-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-cloud-haproxy' logger.go:42: 11:31:19 | demand-backup-cloud/6-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 11:31:19 | demand-backup-cloud/6-delete-data | + local 'host=-h demand-backup-cloud-haproxy' logger.go:42: 11:31:19 | demand-backup-cloud/6-delete-data | ++ get_user_pass root logger.go:42: 11:31:19 | demand-backup-cloud/6-delete-data | ++ local user=root logger.go:42: 11:31:19 | demand-backup-cloud/6-delete-data | +++ get_cluster_name logger.go:42: 11:31:19 | demand-backup-cloud/6-delete-data | +++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:31:20 | demand-backup-cloud/6-delete-data | ++ local secret=demand-backup-cloud-secrets logger.go:42: 11:31:20 | demand-backup-cloud/6-delete-data | ++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:31:20 | demand-backup-cloud/6-delete-data | ++ base64 --decode logger.go:42: 11:31:20 | demand-backup-cloud/6-delete-data | + local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:31:20 | demand-backup-cloud/6-delete-data | + local pod= logger.go:42: 11:31:20 | demand-backup-cloud/6-delete-data | ++ get_client_pod logger.go:42: 11:31:20 | demand-backup-cloud/6-delete-data | ++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:31:21 | demand-backup-cloud/6-delete-data | + client_pod=mysql-client logger.go:42: 11:31:21 | demand-backup-cloud/6-delete-data | + wait_pod mysql-client logger.go:42: 11:31:21 | demand-backup-cloud/6-delete-data | + local pod=mysql-client logger.go:42: 11:31:21 | demand-backup-cloud/6-delete-data | + local ns=kuttl-test-bright-herring logger.go:42: 11:31:21 | demand-backup-cloud/6-delete-data | + set +o xtrace logger.go:42: 11:31:21 | demand-backup-cloud/6-delete-data | mysql-clienttrue logger.go:42: 11:31:21 | demand-backup-cloud/6-delete-data | + kubectl -n 
kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-cloud-haproxy -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:31:21 | demand-backup-cloud/6-delete-data | + /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:31:21 | demand-backup-cloud/6-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 11:31:22 | demand-backup-cloud/6-delete-data | + : logger.go:42: 11:31:22 | demand-backup-cloud/6-delete-data | ++ get_cluster_name logger.go:42: 11:31:22 | demand-backup-cloud/6-delete-data | ++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:31:23 | demand-backup-cloud/6-delete-data | + cluster_name=demand-backup-cloud logger.go:42: 11:31:23 | demand-backup-cloud/6-delete-data | + for i in 0 1 2 logger.go:42: 11:31:23 | demand-backup-cloud/6-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql' logger.go:42: 11:31:23 | demand-backup-cloud/6-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:31:23 | demand-backup-cloud/6-delete-data | ++ local 'host=-h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql' logger.go:42: 11:31:23 | demand-backup-cloud/6-delete-data | +++ get_user_pass root logger.go:42: 11:31:23 | demand-backup-cloud/6-delete-data | +++ local user=root logger.go:42: 11:31:23 | demand-backup-cloud/6-delete-data | ++++ get_cluster_name logger.go:42: 11:31:23 | demand-backup-cloud/6-delete-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:31:24 | demand-backup-cloud/6-delete-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:31:24 | demand-backup-cloud/6-delete-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:31:24 | demand-backup-cloud/6-delete-data | +++ base64 --decode logger.go:42: 11:31:24 | demand-backup-cloud/6-delete-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:31:24 | demand-backup-cloud/6-delete-data | ++ local pod= logger.go:42: 11:31:24 | demand-backup-cloud/6-delete-data | +++ get_client_pod logger.go:42: 11:31:24 | demand-backup-cloud/6-delete-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:31:25 | demand-backup-cloud/6-delete-data | ++ client_pod=mysql-client logger.go:42: 11:31:25 | demand-backup-cloud/6-delete-data | ++ wait_pod mysql-client logger.go:42: 11:31:25 | demand-backup-cloud/6-delete-data | ++ local pod=mysql-client logger.go:42: 11:31:25 | demand-backup-cloud/6-delete-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:31:25 | demand-backup-cloud/6-delete-data | ++ set +o xtrace logger.go:42: 11:31:25 | demand-backup-cloud/6-delete-data | mysql-clienttrue logger.go:42: 11:31:25 | demand-backup-cloud/6-delete-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:31:25 | demand-backup-cloud/6-delete-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:31:25 | demand-backup-cloud/6-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 11:31:26 | demand-backup-cloud/6-delete-data | ++ : logger.go:42: 11:31:26 | demand-backup-cloud/6-delete-data | + data= logger.go:42: 11:31:26 | demand-backup-cloud/6-delete-data | + kubectl create configmap -n kuttl-test-bright-herring 06-delete-data-s3-0 --from-literal=data= logger.go:42: 11:31:27 | demand-backup-cloud/6-delete-data | configmap/06-delete-data-s3-0 created logger.go:42: 11:31:27 | demand-backup-cloud/6-delete-data | + for i in 0 1 2 logger.go:42: 11:31:27 | demand-backup-cloud/6-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql' logger.go:42: 11:31:27 | demand-backup-cloud/6-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:31:27 | demand-backup-cloud/6-delete-data | ++ local 'host=-h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql' logger.go:42: 11:31:27 | demand-backup-cloud/6-delete-data | +++ get_user_pass root logger.go:42: 11:31:27 | demand-backup-cloud/6-delete-data | +++ local user=root logger.go:42: 11:31:27 | demand-backup-cloud/6-delete-data | ++++ get_cluster_name logger.go:42: 11:31:27 | demand-backup-cloud/6-delete-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:31:27 | demand-backup-cloud/6-delete-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:31:27 | demand-backup-cloud/6-delete-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:31:27 | demand-backup-cloud/6-delete-data | +++ base64 --decode logger.go:42: 11:31:28 | demand-backup-cloud/6-delete-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:31:28 | demand-backup-cloud/6-delete-data | ++ local pod= logger.go:42: 11:31:28 | demand-backup-cloud/6-delete-data | +++ get_client_pod logger.go:42: 11:31:28 | demand-backup-cloud/6-delete-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:31:28 | demand-backup-cloud/6-delete-data | ++ client_pod=mysql-client logger.go:42: 11:31:28 | demand-backup-cloud/6-delete-data | ++ wait_pod mysql-client logger.go:42: 11:31:28 | demand-backup-cloud/6-delete-data | ++ local pod=mysql-client logger.go:42: 11:31:28 | demand-backup-cloud/6-delete-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:31:28 | demand-backup-cloud/6-delete-data | ++ set +o xtrace logger.go:42: 11:31:29 | demand-backup-cloud/6-delete-data | mysql-clienttrue logger.go:42: 11:31:29 | demand-backup-cloud/6-delete-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:31:29 | demand-backup-cloud/6-delete-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:31:29 | demand-backup-cloud/6-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 11:31:30 | demand-backup-cloud/6-delete-data | ++ : logger.go:42: 11:31:30 | demand-backup-cloud/6-delete-data | + data= logger.go:42: 11:31:30 | demand-backup-cloud/6-delete-data | + kubectl create configmap -n kuttl-test-bright-herring 06-delete-data-s3-1 --from-literal=data= logger.go:42: 11:31:30 | demand-backup-cloud/6-delete-data | configmap/06-delete-data-s3-1 created logger.go:42: 11:31:30 | demand-backup-cloud/6-delete-data | + for i in 0 1 2 logger.go:42: 11:31:30 | demand-backup-cloud/6-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql' logger.go:42: 11:31:30 | demand-backup-cloud/6-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:31:30 | demand-backup-cloud/6-delete-data | ++ local 'host=-h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql' logger.go:42: 11:31:30 | demand-backup-cloud/6-delete-data | +++ get_user_pass root logger.go:42: 11:31:30 | demand-backup-cloud/6-delete-data | +++ local user=root logger.go:42: 11:31:30 | demand-backup-cloud/6-delete-data | ++++ get_cluster_name logger.go:42: 11:31:30 | demand-backup-cloud/6-delete-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:31:31 | demand-backup-cloud/6-delete-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:31:31 | demand-backup-cloud/6-delete-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:31:31 | demand-backup-cloud/6-delete-data | +++ base64 --decode logger.go:42: 11:31:31 | demand-backup-cloud/6-delete-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:31:31 | demand-backup-cloud/6-delete-data | ++ local pod= logger.go:42: 11:31:31 | demand-backup-cloud/6-delete-data | +++ get_client_pod logger.go:42: 11:31:31 | demand-backup-cloud/6-delete-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:31:32 | demand-backup-cloud/6-delete-data | ++ client_pod=mysql-client logger.go:42: 11:31:32 | demand-backup-cloud/6-delete-data | ++ wait_pod mysql-client logger.go:42: 11:31:32 | demand-backup-cloud/6-delete-data | ++ local pod=mysql-client logger.go:42: 11:31:32 | demand-backup-cloud/6-delete-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:31:32 | demand-backup-cloud/6-delete-data | ++ set +o xtrace logger.go:42: 11:31:32 | demand-backup-cloud/6-delete-data | mysql-clienttrue logger.go:42: 11:31:32 | demand-backup-cloud/6-delete-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:31:32 | demand-backup-cloud/6-delete-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:31:32 | demand-backup-cloud/6-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 11:31:33 | demand-backup-cloud/6-delete-data | ++ : logger.go:42: 11:31:33 | demand-backup-cloud/6-delete-data | + data= logger.go:42: 11:31:33 | demand-backup-cloud/6-delete-data | + kubectl create configmap -n kuttl-test-bright-herring 06-delete-data-s3-2 --from-literal=data= logger.go:42: 11:31:34 | demand-backup-cloud/6-delete-data | configmap/06-delete-data-s3-2 created logger.go:42: 11:31:35 | demand-backup-cloud/6-delete-data | test step completed 6-delete-data logger.go:42: 11:31:35 | demand-backup-cloud/7-restore-from-s3 | starting test step 7-restore-from-s3 logger.go:42: 11:31:35 | demand-backup-cloud/7-restore-from-s3 | PerconaServerMySQLRestore:kuttl-test-bright-herring/demand-backup-cloud-restore-s3 created logger.go:42: 11:35:52 | demand-backup-cloud/7-restore-from-s3 | test step completed 7-restore-from-s3 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | starting test step 8-read-data logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql") kubectl create configmap -n "${NAMESPACE}" 08-read-data-s3-${i} --from-literal=data="${data}" done] logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | + source ../../functions logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ realpath ../../.. logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | ++++ pwd logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/tests/demand-backup-cloud logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | ++ test_name=demand-backup-cloud logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/vars.sh logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export GIT_BRANCH=PR-1251 
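Every TRUNCATE and SELECT in the steps above goes through the run_mysql helper sourced from e2e-tests/functions, and the xtrace makes its shape clear: resolve the root password from the <cluster>-secrets Secret, exec into the long-lived mysql-client pod, pipe the statement into mysql -sN, and strip the client's password warning from the output. The sketch below is reconstructed from the trace, not the literal function body; the real helper also resolves the client pod name by label and waits for it to become ready:

    # assumes NAMESPACE and CLUSTER are set as in the previous sketch
    run_mysql() {
        local command="$1"
        local host="$2"    # e.g. '-h demand-backup-cloud-haproxy'

        local pass
        pass=$(kubectl -n "$NAMESPACE" get secret "${CLUSTER}-secrets" \
            -o jsonpath='{.data.root}' | base64 --decode)

        # '|| :' mirrors the lone '+ :' in the trace: an empty result set
        # (or grep finding nothing to pass through) must not abort the step
        kubectl -n "$NAMESPACE" exec mysql-client -- \
            bash -c "printf '%s\n' \"$command\" | mysql -sN $host -uroot -p'$pass'" \
            | sed -e 's/mysql: //' \
            | grep -v 'Using a password on the command line interface can be insecure.' || :
    }

    # usage, as in step 6 above:
    # run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-cloud-haproxy'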
logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ GIT_BRANCH=PR-1251 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export VERSION=PR-1251-a7b88ac1 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ VERSION=PR-1251-a7b88ac1 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ [[ -z 8.4 ]] logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export MYSQL_VERSION=8.4 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ MYSQL_VERSION=8.4 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export CERT_MANAGER_VER=1.19.1 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ CERT_MANAGER_VER=1.19.1 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export MINIO_VER=5.4.0 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ MINIO_VER=5.4.0 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 11:35:52 | 
demand-backup-cloud/8-read-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ export VAULT_VER=0.16.1 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ VAULT_VER=0.16.1 logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | ++++ which gdate logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | ++++ which date logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ date=/usr/sbin/date logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ oc get projects logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ : logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ kubectl get nodes logger.go:42: 11:35:52 | demand-backup-cloud/8-read-data | +++ grep '^minikube' logger.go:42: 11:35:53 | demand-backup-cloud/8-read-data | +++ which gsed logger.go:42: 11:35:53 | demand-backup-cloud/8-read-data | which: no gsed in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:35:53 | demand-backup-cloud/8-read-data | +++ which sed logger.go:42: 11:35:53 | demand-backup-cloud/8-read-data | ++ sed=/usr/sbin/sed logger.go:42: 11:35:53 | demand-backup-cloud/8-read-data | ++ oc get projects logger.go:42: 11:35:53 | demand-backup-cloud/8-read-data | +++ kubectl version -o json logger.go:42: 11:35:53 | demand-backup-cloud/8-read-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 11:35:53 | demand-backup-cloud/8-read-data | +++ grep '\-eks\-' logger.go:42: 11:35:53 | demand-backup-cloud/8-read-data | grep: warning: stray \ before - logger.go:42: 11:35:53 | demand-backup-cloud/8-read-data | Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 logger.go:42: 11:35:53 | demand-backup-cloud/8-read-data | ++ '[' ']' logger.go:42: 11:35:53 | demand-backup-cloud/8-read-data | ++ EKS=0 logger.go:42: 11:35:53 | demand-backup-cloud/8-read-data | ++ get_cluster_name logger.go:42: 11:35:53 | demand-backup-cloud/8-read-data | ++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:35:54 | demand-backup-cloud/8-read-data | + cluster_name=demand-backup-cloud logger.go:42: 11:35:54 | demand-backup-cloud/8-read-data | + for i in 0 1 2 logger.go:42: 11:35:54 | demand-backup-cloud/8-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql' logger.go:42: 11:35:54 | demand-backup-cloud/8-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:35:54 | demand-backup-cloud/8-read-data | ++ local 'host=-h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql' logger.go:42: 11:35:54 | demand-backup-cloud/8-read-data | +++ get_user_pass root logger.go:42: 11:35:54 | demand-backup-cloud/8-read-data | +++ local user=root logger.go:42: 11:35:54 | demand-backup-cloud/8-read-data | ++++ get_cluster_name logger.go:42: 11:35:54 | demand-backup-cloud/8-read-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:35:55 | demand-backup-cloud/8-read-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:35:55 | demand-backup-cloud/8-read-data | +++ 
kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:35:55 | demand-backup-cloud/8-read-data | +++ base64 --decode logger.go:42: 11:35:55 | demand-backup-cloud/8-read-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:35:55 | demand-backup-cloud/8-read-data | ++ local pod= logger.go:42: 11:35:55 | demand-backup-cloud/8-read-data | +++ get_client_pod logger.go:42: 11:35:55 | demand-backup-cloud/8-read-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:35:56 | demand-backup-cloud/8-read-data | ++ client_pod=mysql-client logger.go:42: 11:35:56 | demand-backup-cloud/8-read-data | ++ wait_pod mysql-client logger.go:42: 11:35:56 | demand-backup-cloud/8-read-data | ++ local pod=mysql-client logger.go:42: 11:35:56 | demand-backup-cloud/8-read-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:35:56 | demand-backup-cloud/8-read-data | ++ set +o xtrace logger.go:42: 11:35:56 | demand-backup-cloud/8-read-data | mysql-clienttrue logger.go:42: 11:35:56 | demand-backup-cloud/8-read-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:35:56 | demand-backup-cloud/8-read-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:35:56 | demand-backup-cloud/8-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 11:35:57 | demand-backup-cloud/8-read-data | + data=100500 logger.go:42: 11:35:57 | demand-backup-cloud/8-read-data | + kubectl create configmap -n kuttl-test-bright-herring 08-read-data-s3-0 --from-literal=data=100500 logger.go:42: 11:35:58 | demand-backup-cloud/8-read-data | configmap/08-read-data-s3-0 created logger.go:42: 11:35:58 | demand-backup-cloud/8-read-data | + for i in 0 1 2 logger.go:42: 11:35:58 | demand-backup-cloud/8-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql' logger.go:42: 11:35:58 | demand-backup-cloud/8-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:35:58 | demand-backup-cloud/8-read-data | ++ local 'host=-h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql' logger.go:42: 11:35:58 | demand-backup-cloud/8-read-data | +++ get_user_pass root logger.go:42: 11:35:58 | demand-backup-cloud/8-read-data | +++ local user=root logger.go:42: 11:35:58 | demand-backup-cloud/8-read-data | ++++ get_cluster_name logger.go:42: 11:35:58 | demand-backup-cloud/8-read-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:35:58 | demand-backup-cloud/8-read-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:35:58 | demand-backup-cloud/8-read-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:35:58 | demand-backup-cloud/8-read-data | +++ base64 --decode logger.go:42: 11:35:59 | demand-backup-cloud/8-read-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:35:59 | demand-backup-cloud/8-read-data | ++ local pod= logger.go:42: 11:35:59 | demand-backup-cloud/8-read-data | +++ get_client_pod logger.go:42: 11:35:59 | demand-backup-cloud/8-read-data | +++ kubectl -n kuttl-test-bright-herring get pods 
--selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:35:59 | demand-backup-cloud/8-read-data | ++ client_pod=mysql-client logger.go:42: 11:35:59 | demand-backup-cloud/8-read-data | ++ wait_pod mysql-client logger.go:42: 11:35:59 | demand-backup-cloud/8-read-data | ++ local pod=mysql-client logger.go:42: 11:35:59 | demand-backup-cloud/8-read-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:35:59 | demand-backup-cloud/8-read-data | ++ set +o xtrace logger.go:42: 11:36:00 | demand-backup-cloud/8-read-data | mysql-clienttrue logger.go:42: 11:36:00 | demand-backup-cloud/8-read-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:36:00 | demand-backup-cloud/8-read-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:36:00 | demand-backup-cloud/8-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 11:36:01 | demand-backup-cloud/8-read-data | + data=100500 logger.go:42: 11:36:01 | demand-backup-cloud/8-read-data | + kubectl create configmap -n kuttl-test-bright-herring 08-read-data-s3-1 --from-literal=data=100500 logger.go:42: 11:36:01 | demand-backup-cloud/8-read-data | configmap/08-read-data-s3-1 created logger.go:42: 11:36:01 | demand-backup-cloud/8-read-data | + for i in 0 1 2 logger.go:42: 11:36:01 | demand-backup-cloud/8-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql' logger.go:42: 11:36:01 | demand-backup-cloud/8-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:36:01 | demand-backup-cloud/8-read-data | ++ local 'host=-h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql' logger.go:42: 11:36:01 | demand-backup-cloud/8-read-data | +++ get_user_pass root logger.go:42: 11:36:01 | demand-backup-cloud/8-read-data | +++ local user=root logger.go:42: 11:36:01 | demand-backup-cloud/8-read-data | ++++ get_cluster_name logger.go:42: 11:36:01 | demand-backup-cloud/8-read-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:36:02 | demand-backup-cloud/8-read-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:36:02 | demand-backup-cloud/8-read-data | +++ base64 --decode logger.go:42: 11:36:02 | demand-backup-cloud/8-read-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:36:02 | demand-backup-cloud/8-read-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:36:02 | demand-backup-cloud/8-read-data | ++ local pod= logger.go:42: 11:36:02 | demand-backup-cloud/8-read-data | +++ get_client_pod logger.go:42: 11:36:02 | demand-backup-cloud/8-read-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:36:03 | demand-backup-cloud/8-read-data | ++ client_pod=mysql-client logger.go:42: 11:36:03 | demand-backup-cloud/8-read-data | ++ wait_pod mysql-client logger.go:42: 11:36:03 | demand-backup-cloud/8-read-data | ++ local pod=mysql-client logger.go:42: 11:36:03 | demand-backup-cloud/8-read-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:36:03 | demand-backup-cloud/8-read-data | ++ set +o xtrace logger.go:42: 11:36:03 | demand-backup-cloud/8-read-data | 
mysql-clienttrue logger.go:42: 11:36:03 | demand-backup-cloud/8-read-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:36:03 | demand-backup-cloud/8-read-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:36:03 | demand-backup-cloud/8-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 11:36:04 | demand-backup-cloud/8-read-data | + data=100500 logger.go:42: 11:36:04 | demand-backup-cloud/8-read-data | + kubectl create configmap -n kuttl-test-bright-herring 08-read-data-s3-2 --from-literal=data=100500 logger.go:42: 11:36:05 | demand-backup-cloud/8-read-data | configmap/08-read-data-s3-2 created logger.go:42: 11:36:06 | demand-backup-cloud/8-read-data | test step completed 8-read-data logger.go:42: 11:36:06 | demand-backup-cloud/9-create-backup-gcp | starting test step 9-create-backup-gcp logger.go:42: 11:36:07 | demand-backup-cloud/9-create-backup-gcp | PerconaServerMySQLBackup:kuttl-test-bright-herring/demand-backup-cloud-gcp created logger.go:42: 11:36:20 | demand-backup-cloud/9-create-backup-gcp | test step completed 9-create-backup-gcp logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | starting test step 10-delete-data logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name))" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql") kubectl create configmap -n "${NAMESPACE}" 10-delete-data-gcp-${i} --from-literal=data="${data}" done] logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | + source ../../functions logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ realpath ../../.. 
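Step 8 above closes the S3 round trip: after the restore in step 7, each of the three mysql pods returns 100500 again, and the result is stored in ConfigMaps 08-read-data-s3-0 through 08-read-data-s3-2 so that this step's kuttl assert file can verify it declaratively. Steps 9 and 10 then repeat the same backup/delete/restore cycle against the GCS storage. Expressed as an equivalent imperative shell check (kuttl itself diffs the expected ConfigMap objects rather than running anything like this):

    # verify that all three replicas serve the restored row
    for i in 0 1 2; do
        got=$(kubectl -n kuttl-test-bright-herring get configmap "08-read-data-s3-$i" \
            -o jsonpath='{.data.data}')
        if [ "$got" != "100500" ]; then
            echo "mysql-$i lost data after the S3 restore (got '$got')"
            exit 1
        fi
    done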
logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | ++++ pwd logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/tests/demand-backup-cloud logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | ++ test_name=demand-backup-cloud logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/vars.sh logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export GIT_BRANCH=PR-1251 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ GIT_BRANCH=PR-1251 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export VERSION=PR-1251-a7b88ac1 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ VERSION=PR-1251-a7b88ac1 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ [[ -z 8.4 ]] logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export MYSQL_VERSION=8.4 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ MYSQL_VERSION=8.4 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export 
IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export CERT_MANAGER_VER=1.19.1 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ CERT_MANAGER_VER=1.19.1 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export MINIO_VER=5.4.0 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ MINIO_VER=5.4.0 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ export VAULT_VER=0.16.1 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ VAULT_VER=0.16.1 logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | ++++ which gdate logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | ++++ which date logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ date=/usr/sbin/date logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ oc get projects logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ : logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ kubectl get nodes logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ grep '^minikube' logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ which gsed logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | which: no gsed in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) 
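The environment phase traced here probes for the GNU-prefixed tool variants (gdate, gsed) before falling back to the system date and sed, which keeps the shared test functions portable between macOS machines (where GNU coreutils and gnu-sed install with a g prefix) and the Linux CI nodes seen in this run. A minimal sketch of the pattern, using POSIX `command -v` in place of the harness's `which`; the variable names mirror the `date=...` and `sed=...` assignments in the trace:

# Prefer GNU-prefixed tools when installed (e.g. macOS + Homebrew),
# otherwise fall back to the system binaries (Linux CI).
if command -v gdate >/dev/null 2>&1; then
    date=$(command -v gdate)
else
    date=$(command -v date)
fi
if command -v gsed >/dev/null 2>&1; then
    sed=$(command -v gsed)
else
    sed=$(command -v sed)
fi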
logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ which sed logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | ++ sed=/usr/sbin/sed logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | ++ oc get projects logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ kubectl version -o json logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | +++ grep '\-eks\-' logger.go:42: 11:36:20 | demand-backup-cloud/10-delete-data | grep: warning: stray \ before - logger.go:42: 11:36:21 | demand-backup-cloud/10-delete-data | Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 logger.go:42: 11:36:21 | demand-backup-cloud/10-delete-data | ++ '[' ']' logger.go:42: 11:36:21 | demand-backup-cloud/10-delete-data | ++ EKS=0 logger.go:42: 11:36:21 | demand-backup-cloud/10-delete-data | +++ get_cluster_name logger.go:42: 11:36:21 | demand-backup-cloud/10-delete-data | +++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:36:21 | demand-backup-cloud/10-delete-data | ++ get_haproxy_svc demand-backup-cloud logger.go:42: 11:36:21 | demand-backup-cloud/10-delete-data | ++ local cluster=demand-backup-cloud logger.go:42: 11:36:21 | demand-backup-cloud/10-delete-data | ++ echo demand-backup-cloud-haproxy logger.go:42: 11:36:21 | demand-backup-cloud/10-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-cloud-haproxy' logger.go:42: 11:36:21 | demand-backup-cloud/10-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 11:36:21 | demand-backup-cloud/10-delete-data | + local 'host=-h demand-backup-cloud-haproxy' logger.go:42: 11:36:21 | demand-backup-cloud/10-delete-data | ++ get_user_pass root logger.go:42: 11:36:21 | demand-backup-cloud/10-delete-data | ++ local user=root logger.go:42: 11:36:21 | demand-backup-cloud/10-delete-data | +++ get_cluster_name logger.go:42: 11:36:21 | demand-backup-cloud/10-delete-data | +++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:36:22 | demand-backup-cloud/10-delete-data | ++ local secret=demand-backup-cloud-secrets logger.go:42: 11:36:22 | demand-backup-cloud/10-delete-data | ++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:36:22 | demand-backup-cloud/10-delete-data | ++ base64 --decode logger.go:42: 11:36:22 | demand-backup-cloud/10-delete-data | + local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:36:22 | demand-backup-cloud/10-delete-data | + local pod= logger.go:42: 11:36:22 | demand-backup-cloud/10-delete-data | ++ get_client_pod logger.go:42: 11:36:22 | demand-backup-cloud/10-delete-data | ++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:36:23 | demand-backup-cloud/10-delete-data | + client_pod=mysql-client logger.go:42: 11:36:23 | demand-backup-cloud/10-delete-data | + wait_pod mysql-client logger.go:42: 11:36:23 | demand-backup-cloud/10-delete-data | + local pod=mysql-client logger.go:42: 11:36:23 | demand-backup-cloud/10-delete-data | + local ns=kuttl-test-bright-herring logger.go:42: 11:36:23 | demand-backup-cloud/10-delete-data | + set +o xtrace logger.go:42: 11:36:23 | demand-backup-cloud/10-delete-data | mysql-clienttrue logger.go:42: 11:36:23 | 
demand-backup-cloud/10-delete-data | + kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-cloud-haproxy -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:36:23 | demand-backup-cloud/10-delete-data | + /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:36:23 | demand-backup-cloud/10-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 11:36:24 | demand-backup-cloud/10-delete-data | + : logger.go:42: 11:36:24 | demand-backup-cloud/10-delete-data | ++ get_cluster_name logger.go:42: 11:36:24 | demand-backup-cloud/10-delete-data | ++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:36:25 | demand-backup-cloud/10-delete-data | + cluster_name=demand-backup-cloud logger.go:42: 11:36:25 | demand-backup-cloud/10-delete-data | + for i in 0 1 2 logger.go:42: 11:36:25 | demand-backup-cloud/10-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql' logger.go:42: 11:36:25 | demand-backup-cloud/10-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:36:25 | demand-backup-cloud/10-delete-data | ++ local 'host=-h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql' logger.go:42: 11:36:25 | demand-backup-cloud/10-delete-data | +++ get_user_pass root logger.go:42: 11:36:25 | demand-backup-cloud/10-delete-data | +++ local user=root logger.go:42: 11:36:25 | demand-backup-cloud/10-delete-data | ++++ get_cluster_name logger.go:42: 11:36:25 | demand-backup-cloud/10-delete-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:36:26 | demand-backup-cloud/10-delete-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:36:26 | demand-backup-cloud/10-delete-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:36:26 | demand-backup-cloud/10-delete-data | +++ base64 --decode logger.go:42: 11:36:26 | demand-backup-cloud/10-delete-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:36:26 | demand-backup-cloud/10-delete-data | ++ local pod= logger.go:42: 11:36:26 | demand-backup-cloud/10-delete-data | +++ get_client_pod logger.go:42: 11:36:26 | demand-backup-cloud/10-delete-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:36:27 | demand-backup-cloud/10-delete-data | ++ client_pod=mysql-client logger.go:42: 11:36:27 | demand-backup-cloud/10-delete-data | ++ wait_pod mysql-client logger.go:42: 11:36:27 | demand-backup-cloud/10-delete-data | ++ local pod=mysql-client logger.go:42: 11:36:27 | demand-backup-cloud/10-delete-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:36:27 | demand-backup-cloud/10-delete-data | ++ set +o xtrace logger.go:42: 11:36:27 | demand-backup-cloud/10-delete-data | mysql-clienttrue logger.go:42: 11:36:27 | demand-backup-cloud/10-delete-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:36:27 | demand-backup-cloud/10-delete-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:36:27 | demand-backup-cloud/10-delete-data | ++ grep -v 'Using a password on 
the command line interface can be insecure.' logger.go:42: 11:36:28 | demand-backup-cloud/10-delete-data | ++ : logger.go:42: 11:36:28 | demand-backup-cloud/10-delete-data | + data= logger.go:42: 11:36:28 | demand-backup-cloud/10-delete-data | + kubectl create configmap -n kuttl-test-bright-herring 10-delete-data-gcp-0 --from-literal=data= logger.go:42: 11:36:29 | demand-backup-cloud/10-delete-data | configmap/10-delete-data-gcp-0 created logger.go:42: 11:36:29 | demand-backup-cloud/10-delete-data | + for i in 0 1 2 logger.go:42: 11:36:29 | demand-backup-cloud/10-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql' logger.go:42: 11:36:29 | demand-backup-cloud/10-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:36:29 | demand-backup-cloud/10-delete-data | ++ local 'host=-h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql' logger.go:42: 11:36:29 | demand-backup-cloud/10-delete-data | +++ get_user_pass root logger.go:42: 11:36:29 | demand-backup-cloud/10-delete-data | +++ local user=root logger.go:42: 11:36:29 | demand-backup-cloud/10-delete-data | ++++ get_cluster_name logger.go:42: 11:36:29 | demand-backup-cloud/10-delete-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:36:29 | demand-backup-cloud/10-delete-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:36:29 | demand-backup-cloud/10-delete-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:36:29 | demand-backup-cloud/10-delete-data | +++ base64 --decode logger.go:42: 11:36:30 | demand-backup-cloud/10-delete-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:36:30 | demand-backup-cloud/10-delete-data | ++ local pod= logger.go:42: 11:36:30 | demand-backup-cloud/10-delete-data | +++ get_client_pod logger.go:42: 11:36:30 | demand-backup-cloud/10-delete-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:36:30 | demand-backup-cloud/10-delete-data | ++ client_pod=mysql-client logger.go:42: 11:36:30 | demand-backup-cloud/10-delete-data | ++ wait_pod mysql-client logger.go:42: 11:36:30 | demand-backup-cloud/10-delete-data | ++ local pod=mysql-client logger.go:42: 11:36:30 | demand-backup-cloud/10-delete-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:36:30 | demand-backup-cloud/10-delete-data | ++ set +o xtrace logger.go:42: 11:36:31 | demand-backup-cloud/10-delete-data | mysql-clienttrue logger.go:42: 11:36:31 | demand-backup-cloud/10-delete-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:36:31 | demand-backup-cloud/10-delete-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:36:31 | demand-backup-cloud/10-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
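Every read in this step is one `run_mysql` call: the SQL is piped to a `mysql` client inside the long-lived `mysql-client` pod, the `mysql: ` prefix is stripped, and the "Using a password on the command line" warning is filtered out so the captured value contains only the query result. A minimal reconstruction from the trace, not the harness's source; the trailing `|| :` mirrors the bare `++ :` visible after empty result sets and keeps `errexit` from aborting when `grep -v` passes nothing through:

run_mysql() {
    local command="$1"   # e.g. 'SELECT * FROM myDB.myTable'
    local host="$2"      # e.g. '-h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql'
    local user
    user=$(get_user_pass root)   # resolves to: -uroot -p'<password>'
    kubectl -n "${NAMESPACE}" exec mysql-client -- \
        bash -c "printf '%s\n' \"${command}\" | mysql -sN ${host} ${user}" \
        | sed -e 's/mysql: //' \
        | grep -v 'Using a password on the command line interface can be insecure.' \
        || :   # an empty result set makes grep -v exit non-zero
}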
logger.go:42: 11:36:32 | demand-backup-cloud/10-delete-data | ++ : logger.go:42: 11:36:32 | demand-backup-cloud/10-delete-data | + data= logger.go:42: 11:36:32 | demand-backup-cloud/10-delete-data | + kubectl create configmap -n kuttl-test-bright-herring 10-delete-data-gcp-1 --from-literal=data= logger.go:42: 11:36:32 | demand-backup-cloud/10-delete-data | configmap/10-delete-data-gcp-1 created logger.go:42: 11:36:32 | demand-backup-cloud/10-delete-data | + for i in 0 1 2 logger.go:42: 11:36:32 | demand-backup-cloud/10-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql' logger.go:42: 11:36:32 | demand-backup-cloud/10-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:36:32 | demand-backup-cloud/10-delete-data | ++ local 'host=-h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql' logger.go:42: 11:36:32 | demand-backup-cloud/10-delete-data | +++ get_user_pass root logger.go:42: 11:36:32 | demand-backup-cloud/10-delete-data | +++ local user=root logger.go:42: 11:36:32 | demand-backup-cloud/10-delete-data | ++++ get_cluster_name logger.go:42: 11:36:32 | demand-backup-cloud/10-delete-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:36:33 | demand-backup-cloud/10-delete-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:36:33 | demand-backup-cloud/10-delete-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:36:33 | demand-backup-cloud/10-delete-data | +++ base64 --decode logger.go:42: 11:36:33 | demand-backup-cloud/10-delete-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:36:33 | demand-backup-cloud/10-delete-data | ++ local pod= logger.go:42: 11:36:33 | demand-backup-cloud/10-delete-data | +++ get_client_pod logger.go:42: 11:36:33 | demand-backup-cloud/10-delete-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:36:34 | demand-backup-cloud/10-delete-data | ++ client_pod=mysql-client logger.go:42: 11:36:34 | demand-backup-cloud/10-delete-data | ++ wait_pod mysql-client logger.go:42: 11:36:34 | demand-backup-cloud/10-delete-data | ++ local pod=mysql-client logger.go:42: 11:36:34 | demand-backup-cloud/10-delete-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:36:34 | demand-backup-cloud/10-delete-data | ++ set +o xtrace logger.go:42: 11:36:34 | demand-backup-cloud/10-delete-data | mysql-clienttrue logger.go:42: 11:36:34 | demand-backup-cloud/10-delete-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:36:34 | demand-backup-cloud/10-delete-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:36:34 | demand-backup-cloud/10-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
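The credentials used above are never hard-coded: `get_user_pass` resolves the cluster name from the first `ps` object in the namespace, reads the base64-encoded `root` key of the `<cluster>-secrets` Secret, and decodes it, which is exactly the three sub-traces (`get_cluster_name`, `kubectl get secret ... -o 'jsonpath={.data.root}'`, `base64 --decode`) repeated before each connection. A sketch of that helper, assuming the same secret layout:

get_user_pass() {
    local user="$1"   # e.g. root
    local cluster secret pass
    cluster=$(kubectl -n "${NAMESPACE}" get ps -o 'jsonpath={.items[0].metadata.name}')
    secret="${cluster}-secrets"
    pass=$(kubectl -n "${NAMESPACE}" get secret "${secret}" \
        -o "jsonpath={.data.${user}}" | base64 --decode)
    printf '%s\n' "-u${user} -p'${pass}'"
}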
logger.go:42: 11:36:35 | demand-backup-cloud/10-delete-data | ++ : logger.go:42: 11:36:35 | demand-backup-cloud/10-delete-data | + data= logger.go:42: 11:36:35 | demand-backup-cloud/10-delete-data | + kubectl create configmap -n kuttl-test-bright-herring 10-delete-data-gcp-2 --from-literal=data= logger.go:42: 11:36:36 | demand-backup-cloud/10-delete-data | configmap/10-delete-data-gcp-2 created logger.go:42: 11:36:37 | demand-backup-cloud/10-delete-data | test step completed 10-delete-data logger.go:42: 11:36:37 | demand-backup-cloud/11-restore-from-gcp | starting test step 11-restore-from-gcp logger.go:42: 11:36:38 | demand-backup-cloud/11-restore-from-gcp | PerconaServerMySQLRestore:kuttl-test-bright-herring/demand-backup-cloud-restore-gcp created logger.go:42: 11:40:47 | demand-backup-cloud/11-restore-from-gcp | test step completed 11-restore-from-gcp logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | starting test step 12-read-data logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql") kubectl create configmap -n "${NAMESPACE}" 12-read-data-gcp-${i} --from-literal=data="${data}" done] logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | + source ../../functions logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ realpath ../../.. logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | ++++ pwd logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/tests/demand-backup-cloud logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | ++ test_name=demand-backup-cloud logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/vars.sh logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 11:40:47 | 
demand-backup-cloud/12-read-data | +++ export GIT_BRANCH=PR-1251 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ GIT_BRANCH=PR-1251 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export VERSION=PR-1251-a7b88ac1 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ VERSION=PR-1251-a7b88ac1 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ [[ -z 8.4 ]] logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export MYSQL_VERSION=8.4 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ MYSQL_VERSION=8.4 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export CERT_MANAGER_VER=1.19.1 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ CERT_MANAGER_VER=1.19.1 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export MINIO_VER=5.4.0 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ MINIO_VER=5.4.0 logger.go:42: 11:40:47 | 
demand-backup-cloud/12-read-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ export VAULT_VER=0.16.1 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ VAULT_VER=0.16.1 logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | ++++ which gdate logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | ++++ which date logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ date=/usr/sbin/date logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ oc get projects logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ : logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ kubectl get nodes logger.go:42: 11:40:47 | demand-backup-cloud/12-read-data | +++ grep '^minikube' logger.go:42: 11:40:48 | demand-backup-cloud/12-read-data | +++ which gsed logger.go:42: 11:40:48 | demand-backup-cloud/12-read-data | which: no gsed in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:40:48 | demand-backup-cloud/12-read-data | +++ which sed logger.go:42: 11:40:48 | demand-backup-cloud/12-read-data | ++ sed=/usr/sbin/sed logger.go:42: 11:40:48 | demand-backup-cloud/12-read-data | ++ oc get projects logger.go:42: 11:40:48 | demand-backup-cloud/12-read-data | +++ kubectl version -o json logger.go:42: 11:40:48 | demand-backup-cloud/12-read-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 11:40:48 | demand-backup-cloud/12-read-data | +++ grep '\-eks\-' logger.go:42: 11:40:48 | demand-backup-cloud/12-read-data | grep: warning: stray \ before - logger.go:42: 11:40:48 | demand-backup-cloud/12-read-data | Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 logger.go:42: 11:40:48 | demand-backup-cloud/12-read-data | ++ '[' ']' logger.go:42: 11:40:48 | demand-backup-cloud/12-read-data | ++ EKS=0 logger.go:42: 11:40:48 | demand-backup-cloud/12-read-data | ++ get_cluster_name logger.go:42: 11:40:48 | demand-backup-cloud/12-read-data | ++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:40:49 | demand-backup-cloud/12-read-data | + cluster_name=demand-backup-cloud logger.go:42: 11:40:49 | demand-backup-cloud/12-read-data | + for i in 0 1 2 logger.go:42: 11:40:49 | demand-backup-cloud/12-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql' logger.go:42: 11:40:49 | demand-backup-cloud/12-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:40:49 | demand-backup-cloud/12-read-data | ++ local 'host=-h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql' logger.go:42: 11:40:49 | demand-backup-cloud/12-read-data | +++ get_user_pass root logger.go:42: 11:40:49 | demand-backup-cloud/12-read-data | +++ local user=root logger.go:42: 11:40:49 | demand-backup-cloud/12-read-data | ++++ get_cluster_name logger.go:42: 11:40:49 | demand-backup-cloud/12-read-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:40:50 | 
demand-backup-cloud/12-read-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:40:50 | demand-backup-cloud/12-read-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:40:50 | demand-backup-cloud/12-read-data | +++ base64 --decode logger.go:42: 11:40:50 | demand-backup-cloud/12-read-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:40:50 | demand-backup-cloud/12-read-data | ++ local pod= logger.go:42: 11:40:50 | demand-backup-cloud/12-read-data | +++ get_client_pod logger.go:42: 11:40:50 | demand-backup-cloud/12-read-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:40:51 | demand-backup-cloud/12-read-data | ++ client_pod=mysql-client logger.go:42: 11:40:51 | demand-backup-cloud/12-read-data | ++ wait_pod mysql-client logger.go:42: 11:40:51 | demand-backup-cloud/12-read-data | ++ local pod=mysql-client logger.go:42: 11:40:51 | demand-backup-cloud/12-read-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:40:51 | demand-backup-cloud/12-read-data | ++ set +o xtrace logger.go:42: 11:40:51 | demand-backup-cloud/12-read-data | mysql-clienttrue logger.go:42: 11:40:51 | demand-backup-cloud/12-read-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:40:51 | demand-backup-cloud/12-read-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:40:51 | demand-backup-cloud/12-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
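The `grep: warning: stray \ before -` emitted a few lines up during the EKS detection comes from GNU grep 3.8 and later, which warns when a backslash escapes a character that needs no escaping; the `\-` in `grep '\-eks\-'` was only ever there to keep the leading dash from being parsed as an option. Passing the pattern with `-e` (or after `--`) achieves the same thing without the warning:

# warns on GNU grep >= 3.8:
#   kubectl version -o json | jq -r .serverVersion.gitVersion | grep '\-eks\-'
# equivalent, warning-free:
kubectl version -o json | jq -r .serverVersion.gitVersion | grep -e '-eks-'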
logger.go:42: 11:40:52 | demand-backup-cloud/12-read-data | + data=100500 logger.go:42: 11:40:52 | demand-backup-cloud/12-read-data | + kubectl create configmap -n kuttl-test-bright-herring 12-read-data-gcp-0 --from-literal=data=100500 logger.go:42: 11:40:53 | demand-backup-cloud/12-read-data | configmap/12-read-data-gcp-0 created logger.go:42: 11:40:53 | demand-backup-cloud/12-read-data | + for i in 0 1 2 logger.go:42: 11:40:53 | demand-backup-cloud/12-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql' logger.go:42: 11:40:53 | demand-backup-cloud/12-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:40:53 | demand-backup-cloud/12-read-data | ++ local 'host=-h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql' logger.go:42: 11:40:53 | demand-backup-cloud/12-read-data | +++ get_user_pass root logger.go:42: 11:40:53 | demand-backup-cloud/12-read-data | +++ local user=root logger.go:42: 11:40:53 | demand-backup-cloud/12-read-data | ++++ get_cluster_name logger.go:42: 11:40:53 | demand-backup-cloud/12-read-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:40:53 | demand-backup-cloud/12-read-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:40:53 | demand-backup-cloud/12-read-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:40:53 | demand-backup-cloud/12-read-data | +++ base64 --decode logger.go:42: 11:40:54 | demand-backup-cloud/12-read-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:40:54 | demand-backup-cloud/12-read-data | ++ local pod= logger.go:42: 11:40:54 | demand-backup-cloud/12-read-data | +++ get_client_pod logger.go:42: 11:40:54 | demand-backup-cloud/12-read-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:40:54 | demand-backup-cloud/12-read-data | ++ client_pod=mysql-client logger.go:42: 11:40:54 | demand-backup-cloud/12-read-data | ++ wait_pod mysql-client logger.go:42: 11:40:54 | demand-backup-cloud/12-read-data | ++ local pod=mysql-client logger.go:42: 11:40:54 | demand-backup-cloud/12-read-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:40:54 | demand-backup-cloud/12-read-data | ++ set +o xtrace logger.go:42: 11:40:55 | demand-backup-cloud/12-read-data | mysql-clienttrue logger.go:42: 11:40:55 | demand-backup-cloud/12-read-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:40:55 | demand-backup-cloud/12-read-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:40:55 | demand-backup-cloud/12-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
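Persisting each query result as a ConfigMap (`12-read-data-gcp-0` above) is what lets kuttl verify it declaratively: the step's assert file pins the expected object state and the harness polls until the live object matches. The capture side is confirmed by the trace; the assert shown as comments below is an assumption based on kuttl's usual convention and this step's naming:

data=$(run_mysql "SELECT * FROM myDB.myTable" \
    "-h ${cluster_name}-mysql-0.${cluster_name}-mysql")
kubectl create configmap -n "${NAMESPACE}" 12-read-data-gcp-0 \
    --from-literal=data="${data}"

# hypothetical assert file for this step:
#   apiVersion: v1
#   kind: ConfigMap
#   metadata:
#     name: 12-read-data-gcp-0
#   data:
#     data: "100500"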
logger.go:42: 11:40:56 | demand-backup-cloud/12-read-data | + data=100500 logger.go:42: 11:40:56 | demand-backup-cloud/12-read-data | + kubectl create configmap -n kuttl-test-bright-herring 12-read-data-gcp-1 --from-literal=data=100500 logger.go:42: 11:40:56 | demand-backup-cloud/12-read-data | configmap/12-read-data-gcp-1 created logger.go:42: 11:40:56 | demand-backup-cloud/12-read-data | + for i in 0 1 2 logger.go:42: 11:40:56 | demand-backup-cloud/12-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql' logger.go:42: 11:40:56 | demand-backup-cloud/12-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:40:56 | demand-backup-cloud/12-read-data | ++ local 'host=-h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql' logger.go:42: 11:40:56 | demand-backup-cloud/12-read-data | +++ get_user_pass root logger.go:42: 11:40:56 | demand-backup-cloud/12-read-data | +++ local user=root logger.go:42: 11:40:56 | demand-backup-cloud/12-read-data | ++++ get_cluster_name logger.go:42: 11:40:56 | demand-backup-cloud/12-read-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:40:57 | demand-backup-cloud/12-read-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:40:57 | demand-backup-cloud/12-read-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:40:57 | demand-backup-cloud/12-read-data | +++ base64 --decode logger.go:42: 11:40:57 | demand-backup-cloud/12-read-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:40:57 | demand-backup-cloud/12-read-data | ++ local pod= logger.go:42: 11:40:57 | demand-backup-cloud/12-read-data | +++ get_client_pod logger.go:42: 11:40:57 | demand-backup-cloud/12-read-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:40:58 | demand-backup-cloud/12-read-data | ++ client_pod=mysql-client logger.go:42: 11:40:58 | demand-backup-cloud/12-read-data | ++ wait_pod mysql-client logger.go:42: 11:40:58 | demand-backup-cloud/12-read-data | ++ local pod=mysql-client logger.go:42: 11:40:58 | demand-backup-cloud/12-read-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:40:58 | demand-backup-cloud/12-read-data | ++ set +o xtrace logger.go:42: 11:40:58 | demand-backup-cloud/12-read-data | mysql-clienttrue logger.go:42: 11:40:58 | demand-backup-cloud/12-read-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:40:58 | demand-backup-cloud/12-read-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:40:58 | demand-backup-cloud/12-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
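Steps 9 and 13 take the on-demand backups that these read/delete/restore cycles exercise; each is triggered by creating a PerconaServerMySQLBackup object named in the log (`demand-backup-cloud-gcp` earlier, `demand-backup-cloud-azure` in the next step). A minimal sketch of such an object applied via heredoc; the apiVersion follows the operator's v1alpha1 CRD conventions, and the `storageName` value is an assumption, since it must match a backup storage defined in the cluster spec, which this log does not show:

kubectl -n "${NAMESPACE}" apply -f - <<EOF
apiVersion: ps.percona.com/v1alpha1
kind: PerconaServerMySQLBackup
metadata:
  name: demand-backup-cloud-gcp
spec:
  clusterName: demand-backup-cloud
  storageName: gcp-cs   # assumed; must name a storage from the PerconaServerMySQL spec
EOF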
logger.go:42: 11:40:59 | demand-backup-cloud/12-read-data | + data=100500 logger.go:42: 11:40:59 | demand-backup-cloud/12-read-data | + kubectl create configmap -n kuttl-test-bright-herring 12-read-data-gcp-2 --from-literal=data=100500 logger.go:42: 11:41:00 | demand-backup-cloud/12-read-data | configmap/12-read-data-gcp-2 created logger.go:42: 11:41:01 | demand-backup-cloud/12-read-data | test step completed 12-read-data logger.go:42: 11:41:01 | demand-backup-cloud/13-create-backup-azure | starting test step 13-create-backup-azure logger.go:42: 11:41:02 | demand-backup-cloud/13-create-backup-azure | PerconaServerMySQLBackup:kuttl-test-bright-herring/demand-backup-cloud-azure created logger.go:42: 11:41:15 | demand-backup-cloud/13-create-backup-azure | test step completed 13-create-backup-azure logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | starting test step 14-delete-data logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name))" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql") kubectl create configmap -n "${NAMESPACE}" 14-delete-data-azure-${i} --from-literal=data="${data}" done] logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | + source ../../functions logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ realpath ../../.. logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | ++++ pwd logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/tests/demand-backup-cloud logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | ++ test_name=demand-backup-cloud logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/vars.sh logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | ++++ git 
rev-parse --abbrev-ref HEAD logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export GIT_BRANCH=PR-1251 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ GIT_BRANCH=PR-1251 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export VERSION=PR-1251-a7b88ac1 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ VERSION=PR-1251-a7b88ac1 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ [[ -z 8.4 ]] logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export MYSQL_VERSION=8.4 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ MYSQL_VERSION=8.4 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export CERT_MANAGER_VER=1.19.1 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ CERT_MANAGER_VER=1.19.1 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export MINIO_VER=5.4.0 
logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ MINIO_VER=5.4.0 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ export VAULT_VER=0.16.1 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ VAULT_VER=0.16.1 logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | ++++ which gdate logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | ++++ which date logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ date=/usr/sbin/date logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ oc get projects logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ : logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ kubectl get nodes logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ grep '^minikube' logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ which gsed logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | which: no gsed in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ which sed logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | ++ sed=/usr/sbin/sed logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | ++ oc get projects logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ kubectl version -o json logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | +++ grep '\-eks\-' logger.go:42: 11:41:15 | demand-backup-cloud/14-delete-data | grep: warning: stray \ before - logger.go:42: 11:41:16 | demand-backup-cloud/14-delete-data | Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 logger.go:42: 11:41:16 | demand-backup-cloud/14-delete-data | ++ '[' ']' logger.go:42: 11:41:16 | demand-backup-cloud/14-delete-data | ++ EKS=0 logger.go:42: 11:41:16 | demand-backup-cloud/14-delete-data | +++ get_cluster_name logger.go:42: 11:41:16 | demand-backup-cloud/14-delete-data | +++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:41:16 | demand-backup-cloud/14-delete-data | ++ get_haproxy_svc demand-backup-cloud logger.go:42: 11:41:16 | demand-backup-cloud/14-delete-data | ++ local cluster=demand-backup-cloud logger.go:42: 11:41:16 | demand-backup-cloud/14-delete-data | ++ echo demand-backup-cloud-haproxy logger.go:42: 11:41:16 | demand-backup-cloud/14-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-cloud-haproxy' logger.go:42: 11:41:16 | demand-backup-cloud/14-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 11:41:16 | demand-backup-cloud/14-delete-data | + local 'host=-h demand-backup-cloud-haproxy' logger.go:42: 11:41:16 | demand-backup-cloud/14-delete-data | ++ get_user_pass root logger.go:42: 11:41:16 | demand-backup-cloud/14-delete-data | ++ local user=root logger.go:42: 11:41:16 | 
demand-backup-cloud/14-delete-data | +++ get_cluster_name logger.go:42: 11:41:16 | demand-backup-cloud/14-delete-data | +++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:41:17 | demand-backup-cloud/14-delete-data | ++ local secret=demand-backup-cloud-secrets logger.go:42: 11:41:17 | demand-backup-cloud/14-delete-data | ++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:41:17 | demand-backup-cloud/14-delete-data | ++ base64 --decode logger.go:42: 11:41:17 | demand-backup-cloud/14-delete-data | + local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:41:17 | demand-backup-cloud/14-delete-data | + local pod= logger.go:42: 11:41:17 | demand-backup-cloud/14-delete-data | ++ get_client_pod logger.go:42: 11:41:17 | demand-backup-cloud/14-delete-data | ++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:41:18 | demand-backup-cloud/14-delete-data | + client_pod=mysql-client logger.go:42: 11:41:18 | demand-backup-cloud/14-delete-data | + wait_pod mysql-client logger.go:42: 11:41:18 | demand-backup-cloud/14-delete-data | + local pod=mysql-client logger.go:42: 11:41:18 | demand-backup-cloud/14-delete-data | + local ns=kuttl-test-bright-herring logger.go:42: 11:41:18 | demand-backup-cloud/14-delete-data | + set +o xtrace logger.go:42: 11:41:18 | demand-backup-cloud/14-delete-data | mysql-clienttrue logger.go:42: 11:41:18 | demand-backup-cloud/14-delete-data | + kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-cloud-haproxy -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:41:18 | demand-backup-cloud/14-delete-data | + /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:41:18 | demand-backup-cloud/14-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' 
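Note the two addressing modes in this step: the TRUNCATE above goes through the `demand-backup-cloud-haproxy` Service (`get_haproxy_svc` simply appends `-haproxy` to the cluster name), which routes the write to the current primary, while the verification reads that follow target each replica directly through the headless-Service DNS form `<cluster>-mysql-<i>.<cluster>-mysql`. Distilled from the step script:

# Write once through the primary-routing HAProxy service ...
run_mysql "TRUNCATE TABLE myDB.myTable" "-h ${cluster_name}-haproxy"
# ... then verify on every replica via its headless-service DNS name,
# so a pod that missed the change would fail the step:
for i in 0 1 2; do
    run_mysql "SELECT * FROM myDB.myTable" \
        "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql"
done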
logger.go:42: 11:41:19 | demand-backup-cloud/14-delete-data | + : logger.go:42: 11:41:19 | demand-backup-cloud/14-delete-data | ++ get_cluster_name logger.go:42: 11:41:19 | demand-backup-cloud/14-delete-data | ++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:41:20 | demand-backup-cloud/14-delete-data | + cluster_name=demand-backup-cloud logger.go:42: 11:41:20 | demand-backup-cloud/14-delete-data | + for i in 0 1 2 logger.go:42: 11:41:20 | demand-backup-cloud/14-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql' logger.go:42: 11:41:20 | demand-backup-cloud/14-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:41:20 | demand-backup-cloud/14-delete-data | ++ local 'host=-h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql' logger.go:42: 11:41:20 | demand-backup-cloud/14-delete-data | +++ get_user_pass root logger.go:42: 11:41:20 | demand-backup-cloud/14-delete-data | +++ local user=root logger.go:42: 11:41:20 | demand-backup-cloud/14-delete-data | ++++ get_cluster_name logger.go:42: 11:41:20 | demand-backup-cloud/14-delete-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:41:21 | demand-backup-cloud/14-delete-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:41:21 | demand-backup-cloud/14-delete-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:41:21 | demand-backup-cloud/14-delete-data | +++ base64 --decode logger.go:42: 11:41:21 | demand-backup-cloud/14-delete-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:41:21 | demand-backup-cloud/14-delete-data | ++ local pod= logger.go:42: 11:41:21 | demand-backup-cloud/14-delete-data | +++ get_client_pod logger.go:42: 11:41:21 | demand-backup-cloud/14-delete-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:41:22 | demand-backup-cloud/14-delete-data | ++ client_pod=mysql-client logger.go:42: 11:41:22 | demand-backup-cloud/14-delete-data | ++ wait_pod mysql-client logger.go:42: 11:41:22 | demand-backup-cloud/14-delete-data | ++ local pod=mysql-client logger.go:42: 11:41:22 | demand-backup-cloud/14-delete-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:41:22 | demand-backup-cloud/14-delete-data | ++ set +o xtrace logger.go:42: 11:41:22 | demand-backup-cloud/14-delete-data | mysql-clienttrue logger.go:42: 11:41:22 | demand-backup-cloud/14-delete-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:41:22 | demand-backup-cloud/14-delete-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:41:22 | demand-backup-cloud/14-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 11:41:23 | demand-backup-cloud/14-delete-data | ++ : logger.go:42: 11:41:23 | demand-backup-cloud/14-delete-data | + data= logger.go:42: 11:41:23 | demand-backup-cloud/14-delete-data | + kubectl create configmap -n kuttl-test-bright-herring 14-delete-data-azure-0 --from-literal=data= logger.go:42: 11:41:24 | demand-backup-cloud/14-delete-data | configmap/14-delete-data-azure-0 created logger.go:42: 11:41:24 | demand-backup-cloud/14-delete-data | + for i in 0 1 2 logger.go:42: 11:41:24 | demand-backup-cloud/14-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql' logger.go:42: 11:41:24 | demand-backup-cloud/14-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:41:24 | demand-backup-cloud/14-delete-data | ++ local 'host=-h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql' logger.go:42: 11:41:24 | demand-backup-cloud/14-delete-data | +++ get_user_pass root logger.go:42: 11:41:24 | demand-backup-cloud/14-delete-data | +++ local user=root logger.go:42: 11:41:24 | demand-backup-cloud/14-delete-data | ++++ get_cluster_name logger.go:42: 11:41:24 | demand-backup-cloud/14-delete-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:41:24 | demand-backup-cloud/14-delete-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:41:24 | demand-backup-cloud/14-delete-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:41:24 | demand-backup-cloud/14-delete-data | +++ base64 --decode logger.go:42: 11:41:25 | demand-backup-cloud/14-delete-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:41:25 | demand-backup-cloud/14-delete-data | ++ local pod= logger.go:42: 11:41:25 | demand-backup-cloud/14-delete-data | +++ get_client_pod logger.go:42: 11:41:25 | demand-backup-cloud/14-delete-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:41:25 | demand-backup-cloud/14-delete-data | ++ client_pod=mysql-client logger.go:42: 11:41:25 | demand-backup-cloud/14-delete-data | ++ wait_pod mysql-client logger.go:42: 11:41:25 | demand-backup-cloud/14-delete-data | ++ local pod=mysql-client logger.go:42: 11:41:25 | demand-backup-cloud/14-delete-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:41:25 | demand-backup-cloud/14-delete-data | ++ set +o xtrace logger.go:42: 11:41:26 | demand-backup-cloud/14-delete-data | mysql-clienttrue logger.go:42: 11:41:26 | demand-backup-cloud/14-delete-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:41:26 | demand-backup-cloud/14-delete-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:41:26 | demand-backup-cloud/14-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
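The terse `mysql-clienttrue` lines throughout are `wait_pod` output: the helper disables xtrace (`set +o xtrace`), prints the pod name, then polls readiness until the container status flag reads `true`, so pod name and flag land on one line. The `get_client_pod` command is shown verbatim in the trace; `wait_pod` below is one plausible shape consistent with that output, not the harness's actual source:

get_client_pod() {
    kubectl -n "${NAMESPACE}" get pods --selector=name=mysql-client \
        -o 'jsonpath={.items[].metadata.name}'
}

wait_pod() {
    local pod="$1"
    set +o xtrace   # the harness silences the polling loop the same way
    echo -n "${pod}"
    until [[ $(kubectl -n "${NAMESPACE}" get pod "${pod}" \
            -o 'jsonpath={.status.containerStatuses[0].ready}') == "true" ]]; do
        sleep 1
    done
    echo "true"     # pod name + readiness flag renders as "mysql-clienttrue"
    set -o xtrace
}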
logger.go:42: 11:41:27 | demand-backup-cloud/14-delete-data | ++ : logger.go:42: 11:41:27 | demand-backup-cloud/14-delete-data | + data= logger.go:42: 11:41:27 | demand-backup-cloud/14-delete-data | + kubectl create configmap -n kuttl-test-bright-herring 14-delete-data-azure-1 --from-literal=data= logger.go:42: 11:41:27 | demand-backup-cloud/14-delete-data | configmap/14-delete-data-azure-1 created logger.go:42: 11:41:27 | demand-backup-cloud/14-delete-data | + for i in 0 1 2 logger.go:42: 11:41:27 | demand-backup-cloud/14-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql' logger.go:42: 11:41:27 | demand-backup-cloud/14-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:41:27 | demand-backup-cloud/14-delete-data | ++ local 'host=-h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql' logger.go:42: 11:41:27 | demand-backup-cloud/14-delete-data | +++ get_user_pass root logger.go:42: 11:41:27 | demand-backup-cloud/14-delete-data | +++ local user=root logger.go:42: 11:41:27 | demand-backup-cloud/14-delete-data | ++++ get_cluster_name logger.go:42: 11:41:27 | demand-backup-cloud/14-delete-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:41:28 | demand-backup-cloud/14-delete-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:41:28 | demand-backup-cloud/14-delete-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:41:28 | demand-backup-cloud/14-delete-data | +++ base64 --decode logger.go:42: 11:41:28 | demand-backup-cloud/14-delete-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:41:28 | demand-backup-cloud/14-delete-data | ++ local pod= logger.go:42: 11:41:28 | demand-backup-cloud/14-delete-data | +++ get_client_pod logger.go:42: 11:41:28 | demand-backup-cloud/14-delete-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:41:29 | demand-backup-cloud/14-delete-data | ++ client_pod=mysql-client logger.go:42: 11:41:29 | demand-backup-cloud/14-delete-data | ++ wait_pod mysql-client logger.go:42: 11:41:29 | demand-backup-cloud/14-delete-data | ++ local pod=mysql-client logger.go:42: 11:41:29 | demand-backup-cloud/14-delete-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:41:29 | demand-backup-cloud/14-delete-data | ++ set +o xtrace logger.go:42: 11:41:29 | demand-backup-cloud/14-delete-data | mysql-clienttrue logger.go:42: 11:41:29 | demand-backup-cloud/14-delete-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:41:29 | demand-backup-cloud/14-delete-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:41:29 | demand-backup-cloud/14-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
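Each iteration above is the same emptiness check against a different replica: the query result (nothing, after the wipe) is captured into a per-pod ConfigMap so the step's assert file can verify deletion on all three nodes. The loop, printed verbatim for step 16 further down and identical in shape here:

    for i in 0 1 2; do
      data=$(run_mysql "SELECT * FROM myDB.myTable" \
        "-h ${CLUSTER}-mysql-${i}.${CLUSTER}-mysql")
      kubectl create configmap -n "${NAMESPACE}" "14-delete-data-azure-${i}" \
        --from-literal=data="${data}"
    done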
logger.go:42: 11:41:30 | demand-backup-cloud/14-delete-data | ++ : logger.go:42: 11:41:30 | demand-backup-cloud/14-delete-data | + data= logger.go:42: 11:41:30 | demand-backup-cloud/14-delete-data | + kubectl create configmap -n kuttl-test-bright-herring 14-delete-data-azure-2 --from-literal=data= logger.go:42: 11:41:31 | demand-backup-cloud/14-delete-data | configmap/14-delete-data-azure-2 created logger.go:42: 11:41:32 | demand-backup-cloud/14-delete-data | test step completed 14-delete-data logger.go:42: 11:41:32 | demand-backup-cloud/15-restore-from-azure | starting test step 15-restore-from-azure logger.go:42: 11:41:33 | demand-backup-cloud/15-restore-from-azure | PerconaServerMySQLRestore:kuttl-test-bright-herring/demand-backup-cloud-restore-azure created logger.go:42: 11:45:42 | demand-backup-cloud/15-restore-from-azure | test step completed 15-restore-from-azure logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | starting test step 16-read-data logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql") kubectl create configmap -n "${NAMESPACE}" 16-read-data-azure-${i} --from-literal=data="${data}" done] logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | + source ../../functions logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ realpath ../../.. logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | ++++ pwd logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/tests/demand-backup-cloud logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | ++ test_name=demand-backup-cloud logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/vars.sh logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 11:45:42 | 
demand-backup-cloud/16-read-data | +++ export GIT_BRANCH=PR-1251 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ GIT_BRANCH=PR-1251 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export VERSION=PR-1251-a7b88ac1 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ VERSION=PR-1251-a7b88ac1 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ [[ -z 8.4 ]] logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export MYSQL_VERSION=8.4 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ MYSQL_VERSION=8.4 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export CERT_MANAGER_VER=1.19.1 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ CERT_MANAGER_VER=1.19.1 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export MINIO_VER=5.4.0 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ MINIO_VER=5.4.0 logger.go:42: 11:45:42 | 
demand-backup-cloud/16-read-data | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ export VAULT_VER=0.16.1 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ VAULT_VER=0.16.1 logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | ++++ which gdate logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | ++++ which date logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ date=/usr/sbin/date logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ oc get projects logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ : logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ kubectl get nodes logger.go:42: 11:45:42 | demand-backup-cloud/16-read-data | +++ grep '^minikube' logger.go:42: 11:45:43 | demand-backup-cloud/16-read-data | +++ which gsed logger.go:42: 11:45:43 | demand-backup-cloud/16-read-data | which: no gsed in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:45:43 | demand-backup-cloud/16-read-data | +++ which sed logger.go:42: 11:45:43 | demand-backup-cloud/16-read-data | ++ sed=/usr/sbin/sed logger.go:42: 11:45:43 | demand-backup-cloud/16-read-data | ++ oc get projects logger.go:42: 11:45:43 | demand-backup-cloud/16-read-data | +++ kubectl version -o json logger.go:42: 11:45:43 | demand-backup-cloud/16-read-data | +++ jq -r .serverVersion.gitVersion logger.go:42: 11:45:43 | demand-backup-cloud/16-read-data | +++ grep '\-eks\-' logger.go:42: 11:45:43 | demand-backup-cloud/16-read-data | grep: warning: stray \ before - logger.go:42: 11:45:43 | demand-backup-cloud/16-read-data | Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 logger.go:42: 11:45:43 | demand-backup-cloud/16-read-data | ++ '[' ']' logger.go:42: 11:45:43 | demand-backup-cloud/16-read-data | ++ EKS=0 logger.go:42: 11:45:43 | demand-backup-cloud/16-read-data | ++ get_cluster_name logger.go:42: 11:45:43 | demand-backup-cloud/16-read-data | ++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:45:44 | demand-backup-cloud/16-read-data | + cluster_name=demand-backup-cloud logger.go:42: 11:45:44 | demand-backup-cloud/16-read-data | + for i in 0 1 2 logger.go:42: 11:45:44 | demand-backup-cloud/16-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql' logger.go:42: 11:45:44 | demand-backup-cloud/16-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:45:44 | demand-backup-cloud/16-read-data | ++ local 'host=-h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql' logger.go:42: 11:45:44 | demand-backup-cloud/16-read-data | +++ get_user_pass root logger.go:42: 11:45:44 | demand-backup-cloud/16-read-data | +++ local user=root logger.go:42: 11:45:44 | demand-backup-cloud/16-read-data | ++++ get_cluster_name logger.go:42: 11:45:44 | demand-backup-cloud/16-read-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:45:44 | 
demand-backup-cloud/16-read-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:45:44 | demand-backup-cloud/16-read-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:45:44 | demand-backup-cloud/16-read-data | +++ base64 --decode logger.go:42: 11:45:45 | demand-backup-cloud/16-read-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:45:45 | demand-backup-cloud/16-read-data | ++ local pod= logger.go:42: 11:45:45 | demand-backup-cloud/16-read-data | +++ get_client_pod logger.go:42: 11:45:45 | demand-backup-cloud/16-read-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:45:45 | demand-backup-cloud/16-read-data | ++ client_pod=mysql-client logger.go:42: 11:45:45 | demand-backup-cloud/16-read-data | ++ wait_pod mysql-client logger.go:42: 11:45:45 | demand-backup-cloud/16-read-data | ++ local pod=mysql-client logger.go:42: 11:45:45 | demand-backup-cloud/16-read-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:45:45 | demand-backup-cloud/16-read-data | ++ set +o xtrace logger.go:42: 11:45:46 | demand-backup-cloud/16-read-data | mysql-clienttrue logger.go:42: 11:45:46 | demand-backup-cloud/16-read-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-0.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:45:46 | demand-backup-cloud/16-read-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:45:46 | demand-backup-cloud/16-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
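Aside on the "grep: warning: stray \ before -" entries recurring in this log: the EKS probe greps the server gitVersion with an over-escaped pattern ('\-eks\-'), which newer GNU grep flags. A warning-free variant of the same probe, using -e so the leading hyphen is not parsed as an option:

    server_ver=$(kubectl version -o json | jq -r .serverVersion.gitVersion)
    if printf '%s\n' "$server_ver" | grep -q -e '-eks-'; then
      EKS=1
    else
      EKS=0
    fi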
logger.go:42: 11:45:47 | demand-backup-cloud/16-read-data | + data=100500 logger.go:42: 11:45:47 | demand-backup-cloud/16-read-data | + kubectl create configmap -n kuttl-test-bright-herring 16-read-data-azure-0 --from-literal=data=100500 logger.go:42: 11:45:47 | demand-backup-cloud/16-read-data | configmap/16-read-data-azure-0 created logger.go:42: 11:45:47 | demand-backup-cloud/16-read-data | + for i in 0 1 2 logger.go:42: 11:45:47 | demand-backup-cloud/16-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql' logger.go:42: 11:45:47 | demand-backup-cloud/16-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:45:47 | demand-backup-cloud/16-read-data | ++ local 'host=-h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql' logger.go:42: 11:45:47 | demand-backup-cloud/16-read-data | +++ get_user_pass root logger.go:42: 11:45:47 | demand-backup-cloud/16-read-data | +++ local user=root logger.go:42: 11:45:47 | demand-backup-cloud/16-read-data | ++++ get_cluster_name logger.go:42: 11:45:47 | demand-backup-cloud/16-read-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:45:48 | demand-backup-cloud/16-read-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:45:48 | demand-backup-cloud/16-read-data | +++ base64 --decode logger.go:42: 11:45:48 | demand-backup-cloud/16-read-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:45:48 | demand-backup-cloud/16-read-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:45:48 | demand-backup-cloud/16-read-data | ++ local pod= logger.go:42: 11:45:48 | demand-backup-cloud/16-read-data | +++ get_client_pod logger.go:42: 11:45:48 | demand-backup-cloud/16-read-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:45:49 | demand-backup-cloud/16-read-data | ++ client_pod=mysql-client logger.go:42: 11:45:49 | demand-backup-cloud/16-read-data | ++ wait_pod mysql-client logger.go:42: 11:45:49 | demand-backup-cloud/16-read-data | ++ local pod=mysql-client logger.go:42: 11:45:49 | demand-backup-cloud/16-read-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:45:49 | demand-backup-cloud/16-read-data | ++ set +o xtrace logger.go:42: 11:45:49 | demand-backup-cloud/16-read-data | mysql-clienttrue logger.go:42: 11:45:49 | demand-backup-cloud/16-read-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-1.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:45:49 | demand-backup-cloud/16-read-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:45:49 | demand-backup-cloud/16-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 11:45:50 | demand-backup-cloud/16-read-data | + data=100500 logger.go:42: 11:45:50 | demand-backup-cloud/16-read-data | + kubectl create configmap -n kuttl-test-bright-herring 16-read-data-azure-1 --from-literal=data=100500 logger.go:42: 11:45:51 | demand-backup-cloud/16-read-data | configmap/16-read-data-azure-1 created logger.go:42: 11:45:51 | demand-backup-cloud/16-read-data | + for i in 0 1 2 logger.go:42: 11:45:51 | demand-backup-cloud/16-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql' logger.go:42: 11:45:51 | demand-backup-cloud/16-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 11:45:51 | demand-backup-cloud/16-read-data | ++ local 'host=-h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql' logger.go:42: 11:45:51 | demand-backup-cloud/16-read-data | +++ get_user_pass root logger.go:42: 11:45:51 | demand-backup-cloud/16-read-data | +++ local user=root logger.go:42: 11:45:51 | demand-backup-cloud/16-read-data | ++++ get_cluster_name logger.go:42: 11:45:51 | demand-backup-cloud/16-read-data | ++++ kubectl -n kuttl-test-bright-herring get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 11:45:51 | demand-backup-cloud/16-read-data | +++ local secret=demand-backup-cloud-secrets logger.go:42: 11:45:51 | demand-backup-cloud/16-read-data | +++ kubectl -n kuttl-test-bright-herring get secret demand-backup-cloud-secrets -o 'jsonpath={.data.root}' logger.go:42: 11:45:51 | demand-backup-cloud/16-read-data | +++ base64 --decode logger.go:42: 11:45:52 | demand-backup-cloud/16-read-data | ++ local 'user=-uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:45:52 | demand-backup-cloud/16-read-data | ++ local pod= logger.go:42: 11:45:52 | demand-backup-cloud/16-read-data | +++ get_client_pod logger.go:42: 11:45:52 | demand-backup-cloud/16-read-data | +++ kubectl -n kuttl-test-bright-herring get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 11:45:52 | demand-backup-cloud/16-read-data | ++ client_pod=mysql-client logger.go:42: 11:45:52 | demand-backup-cloud/16-read-data | ++ wait_pod mysql-client logger.go:42: 11:45:52 | demand-backup-cloud/16-read-data | ++ local pod=mysql-client logger.go:42: 11:45:52 | demand-backup-cloud/16-read-data | ++ local ns=kuttl-test-bright-herring logger.go:42: 11:45:52 | demand-backup-cloud/16-read-data | ++ set +o xtrace logger.go:42: 11:45:53 | demand-backup-cloud/16-read-data | mysql-clienttrue logger.go:42: 11:45:53 | demand-backup-cloud/16-read-data | ++ kubectl -n kuttl-test-bright-herring exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-cloud-mysql-2.demand-backup-cloud-mysql -uroot -p'\''QAL0L]~5u?eNJD,-'\''' logger.go:42: 11:45:53 | demand-backup-cloud/16-read-data | ++ /usr/sbin/sed -e 's/mysql: //' logger.go:42: 11:45:53 | demand-backup-cloud/16-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
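For context between steps: step 15 created a PerconaServerMySQLRestore pointing at the Azure backup, and this step re-reads every replica; data=100500 matches the rows written before the wipe, confirming the restore. A hedged sketch of the restore object behind step 15 (apiVersion and field names recalled from the ps.percona.com API, not shown in this log; check the repo's deploy examples before reuse):

    cat <<EOF | kubectl -n "${NAMESPACE}" apply -f -
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLRestore
    metadata:
      name: demand-backup-cloud-restore-azure
    spec:
      clusterName: demand-backup-cloud
      backupName: demand-backup-cloud-azure
    EOF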
logger.go:42: 11:45:54 | demand-backup-cloud/16-read-data | + data=100500 logger.go:42: 11:45:54 | demand-backup-cloud/16-read-data | + kubectl create configmap -n kuttl-test-bright-herring 16-read-data-azure-2 --from-literal=data=100500 logger.go:42: 11:45:54 | demand-backup-cloud/16-read-data | configmap/16-read-data-azure-2 created logger.go:42: 11:45:56 | demand-backup-cloud/16-read-data | test step completed 16-read-data logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | starting test step 17-delete-all-backups logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | running command: [sh -c set -o errexit set -o xtrace source ../../functions kubectl delete ps-backup --all -n "${NAMESPACE}"] logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | + source ../../functions logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ realpath ../../.. logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | ++++ pwd logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/tests/demand-backup-cloud logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | ++ test_name=demand-backup-cloud logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/vars.sh logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export GIT_BRANCH=PR-1251 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ GIT_BRANCH=PR-1251 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export VERSION=PR-1251-a7b88ac1 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ VERSION=PR-1251-a7b88ac1 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export 
IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ [[ -z 8.4 ]] logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export MYSQL_VERSION=8.4 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ MYSQL_VERSION=8.4 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export CERT_MANAGER_VER=1.19.1 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ CERT_MANAGER_VER=1.19.1 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export MINIO_VER=5.4.0 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ MINIO_VER=5.4.0 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 
11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ export VAULT_VER=0.16.1 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ VAULT_VER=0.16.1 logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | ++++ which gdate logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | ++++ which date logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ date=/usr/sbin/date logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ oc get projects logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ : logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ kubectl get nodes logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ grep '^minikube' logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ which gsed logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | which: no gsed in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ which sed logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | ++ sed=/usr/sbin/sed logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | ++ oc get projects logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ kubectl version -o json logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ jq -r .serverVersion.gitVersion logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | +++ grep '\-eks\-' logger.go:42: 11:45:56 | demand-backup-cloud/17-delete-all-backups | grep: warning: stray \ before - logger.go:42: 11:45:57 | demand-backup-cloud/17-delete-all-backups | Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 logger.go:42: 11:45:57 | demand-backup-cloud/17-delete-all-backups | ++ '[' ']' logger.go:42: 11:45:57 | demand-backup-cloud/17-delete-all-backups | ++ EKS=0 logger.go:42: 11:45:57 | demand-backup-cloud/17-delete-all-backups | + kubectl delete ps-backup --all -n kuttl-test-bright-herring logger.go:42: 11:45:57 | demand-backup-cloud/17-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-cloud-azure" deleted from kuttl-test-bright-herring namespace logger.go:42: 11:45:58 | demand-backup-cloud/17-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-cloud-gcp" deleted from kuttl-test-bright-herring namespace logger.go:42: 11:45:58 | demand-backup-cloud/17-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-cloud-s3" deleted from kuttl-test-bright-herring namespace logger.go:42: 11:46:00 | demand-backup-cloud/17-delete-all-backups | test step completed 17-delete-all-backups logger.go:42: 11:46:00 | demand-backup-cloud/98-drop-finalizer | starting test step 98-drop-finalizer logger.go:42: 11:46:01 | demand-backup-cloud/98-drop-finalizer | PerconaServerMySQL:kuttl-test-bright-herring/demand-backup-cloud updated logger.go:42: 11:46:01 | demand-backup-cloud/98-drop-finalizer | test step completed 98-drop-finalizer logger.go:42: 11:46:01 | 
demand-backup-cloud/99-remove-cluster-gracefully | starting test step 99-remove-cluster-gracefully logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | running command: [sh -c set -o errexit set -o xtrace source ../../functions destroy_operator] logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | + source ../../functions logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ realpath ../../.. logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | ++++ pwd logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/tests/demand-backup-cloud logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | ++ test_name=demand-backup-cloud logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/vars.sh logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/deploy logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/e2e-tests/conf logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup-cloud logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export GIT_BRANCH=PR-1251 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ GIT_BRANCH=PR-1251 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export VERSION=PR-1251-a7b88ac1 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ VERSION=PR-1251-a7b88ac1 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ [[ -z 8.4 ]] logger.go:42: 11:46:02 | 
demand-backup-cloud/99-remove-cluster-gracefully | +++ export MYSQL_VERSION=8.4 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ MYSQL_VERSION=8.4 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.4 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.4 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.4 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export CERT_MANAGER_VER=1.19.1 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ CERT_MANAGER_VER=1.19.1 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export MINIO_VER=5.4.0 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ MINIO_VER=5.4.0 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ export VAULT_VER=0.16.1 logger.go:42: 11:46:02 | 
demand-backup-cloud/99-remove-cluster-gracefully | +++ VAULT_VER=0.16.1 logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | ++++ which gdate logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | ++++ which date logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ date=/usr/sbin/date logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ oc get projects logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ : logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ kubectl get nodes logger.go:42: 11:46:02 | demand-backup-cloud/99-remove-cluster-gracefully | +++ grep '^minikube' logger.go:42: 11:46:03 | demand-backup-cloud/99-remove-cluster-gracefully | +++ which gsed logger.go:42: 11:46:03 | demand-backup-cloud/99-remove-cluster-gracefully | which: no gsed in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1251/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 11:46:03 | demand-backup-cloud/99-remove-cluster-gracefully | +++ which sed logger.go:42: 11:46:03 | demand-backup-cloud/99-remove-cluster-gracefully | ++ sed=/usr/sbin/sed logger.go:42: 11:46:03 | demand-backup-cloud/99-remove-cluster-gracefully | ++ oc get projects logger.go:42: 11:46:03 | demand-backup-cloud/99-remove-cluster-gracefully | +++ kubectl version -o json logger.go:42: 11:46:03 | demand-backup-cloud/99-remove-cluster-gracefully | +++ jq -r .serverVersion.gitVersion logger.go:42: 11:46:03 | demand-backup-cloud/99-remove-cluster-gracefully | +++ grep '\-eks\-' logger.go:42: 11:46:03 | demand-backup-cloud/99-remove-cluster-gracefully | grep: warning: stray \ before - logger.go:42: 11:46:03 | demand-backup-cloud/99-remove-cluster-gracefully | Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 logger.go:42: 11:46:03 | demand-backup-cloud/99-remove-cluster-gracefully | ++ '[' ']' logger.go:42: 11:46:03 | demand-backup-cloud/99-remove-cluster-gracefully | ++ EKS=0 logger.go:42: 11:46:03 | demand-backup-cloud/99-remove-cluster-gracefully | + destroy_operator logger.go:42: 11:46:03 | demand-backup-cloud/99-remove-cluster-gracefully | + kubectl -n ps-operator delete deployment percona-server-mysql-operator --force --grace-period=0 logger.go:42: 11:46:03 | demand-backup-cloud/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 11:46:04 | demand-backup-cloud/99-remove-cluster-gracefully | deployment.apps "percona-server-mysql-operator" force deleted from ps-operator namespace logger.go:42: 11:46:04 | demand-backup-cloud/99-remove-cluster-gracefully | + [[ -n ps-operator ]] logger.go:42: 11:46:04 | demand-backup-cloud/99-remove-cluster-gracefully | + kubectl delete namespace ps-operator --force --grace-period=0 logger.go:42: 11:46:04 | demand-backup-cloud/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. 
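Teardown recap: step 17 removed every ps-backup object, step 98 dropped the cluster's finalizer, and this step force-deletes the operator Deployment and its namespace, which is why kubectl prints the immediate-deletion warnings above. The sequence as executed, with --force --grace-period=0 skipping graceful termination:

    kubectl delete ps-backup --all -n "${NAMESPACE}"
    kubectl -n ps-operator delete deployment percona-server-mysql-operator \
      --force --grace-period=0
    kubectl delete namespace ps-operator --force --grace-period=0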
logger.go:42: 11:46:04 | demand-backup-cloud/99-remove-cluster-gracefully | namespace "ps-operator" force deleted logger.go:42: 11:46:15 | demand-backup-cloud/99-remove-cluster-gracefully | test step completed 99-remove-cluster-gracefully logger.go:42: 11:46:16 | demand-backup-cloud | demand-backup-cloud events from ns kuttl-test-bright-herring: logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:08 +0000 UTC Normal Pod mysql-client Binding Scheduled Successfully assigned kuttl-test-bright-herring/mysql-client to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:09 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:30 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 21.298s (21.298s including waiting). Image size: 451318419 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:30 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Created Created container: mysql-client kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:30 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Started Started container mysql-client kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:40 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:40 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:40 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-bright-herring/datadir-demand-backup-cloud-mysql-0" pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:40 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-mysql SuccessfulCreate create Claim datadir-demand-backup-cloud-mysql-0 Pod demand-backup-cloud-mysql-0 in StatefulSet demand-backup-cloud-mysql success statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:40 +0000 UTC Normal PodDisruptionBudget.policy demand-backup-cloud-mysql NoPods No matching pods found controllermanager logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:40 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-mysql SuccessfulCreate create Pod demand-backup-cloud-mysql-0 in StatefulSet demand-backup-cloud-mysql successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:41 +0000 UTC Normal Pod demand-backup-cloud-orc-0 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-orc-0 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:41 +0000 UTC Warning Pod demand-backup-cloud-orc-0 FailedMount MountVolume.SetUp failed for volume "users" : secret "internal-demand-backup-cloud" not found kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:41 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-orc SuccessfulCreate create Pod demand-backup-cloud-orc-0 in StatefulSet demand-backup-cloud-orc successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:41 +0000 UTC Normal PodDisruptionBudget.policy demand-backup-cloud-orchestrator NoPods No matching pods found controllermanager logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:41 +0000 UTC Normal PerconaServerMySQL.ps.percona.com demand-backup-cloud ClusterStateChanged -> Initializing ps-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:42 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:43 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-0 ProvisioningSucceeded Successfully provisioned volume pvc-3051ad2f-7e2d-415a-bacd-8e1f236017e0 pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:43 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 491ms (491ms including waiting). Image size: 110771092 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:43 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:43 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:44 +0000 UTC Normal Pod demand-backup-cloud-mysql-0 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-mysql-0 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-kptm default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:47 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:51 +0000 UTC Normal Pod demand-backup-cloud-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-3051ad2f-7e2d-415a-bacd-8e1f236017e0" attachdetach-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:51 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 4.217s (4.218s including waiting). Image size: 73448087 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:51 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:51 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:51 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:52 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 846ms (846ms including waiting). Image size: 73448087 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:52 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:52 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:24:53 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:01 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 8.479s (8.479s including waiting). Image size: 110771092 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:01 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:01 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:05 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:22 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 17.604s (17.604s including waiting). Image size: 451318419 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:22 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:22 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:22 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:24 +0000 UTC Normal Pod demand-backup-cloud-orc-1 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-orc-1 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-hkqr default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:24 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-orc SuccessfulCreate create Pod demand-backup-cloud-orc-1 in StatefulSet demand-backup-cloud-orc successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:25 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:32 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 7.576s (7.576s including waiting). Image size: 110771092 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:32 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:32 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:36 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:41 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 4.572s (4.572s including waiting). 
Image size: 73448087 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:41 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:41 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:41 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:41 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 300ms (300ms including waiting). Image size: 73448087 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:41 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:41 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 25.249s (25.249s including waiting). Image size: 448774388 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:55 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 7.664s (7.664s including waiting). Image size: 138742280 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:55 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:25:56 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:03 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:03 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. 
If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:03 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 Provisioning External provisioner is provisioning volume for claim "kuttl-test-bright-herring/datadir-demand-backup-cloud-mysql-1" pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:03 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-mysql SuccessfulCreate create Claim datadir-demand-backup-cloud-mysql-1 Pod demand-backup-cloud-mysql-1 in StatefulSet demand-backup-cloud-mysql success statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:03 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-mysql SuccessfulCreate create Pod demand-backup-cloud-mysql-1 in StatefulSet demand-backup-cloud-mysql successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:07 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 ProvisioningSucceeded Successfully provisioned volume pvc-928812e7-f534-4007-90c5-a694efa98da8 pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:07 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-haproxy-0 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:07 +0000 UTC Normal PodDisruptionBudget.policy demand-backup-cloud-haproxy NoPods No matching pods found controllermanager logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:07 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-haproxy SuccessfulCreate create Pod demand-backup-cloud-haproxy-0 in StatefulSet demand-backup-cloud-haproxy successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:07 +0000 UTC Normal Pod demand-backup-cloud-mysql-1 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-mysql-1 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-hkqr default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:08 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:08 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 284ms (284ms including waiting). Image size: 110771092 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:08 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:08 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:10 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:12 +0000 UTC Normal Pod demand-backup-cloud-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-928812e7-f534-4007-90c5-a694efa98da8" attachdetach-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:13 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:13 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 309ms (309ms including waiting). Image size: 110771092 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:13 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:13 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:14 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 4.152s (4.152s including waiting). Image size: 103612925 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:14 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:14 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:14 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:14 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:14 +0000 UTC Normal Pod demand-backup-cloud-orc-2 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-orc-2 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-kptm default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:14 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:14 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-orc SuccessfulCreate create Pod demand-backup-cloud-orc-2 in StatefulSet demand-backup-cloud-orc successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:15 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 261ms (261ms including waiting). Image size: 103612925 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:15 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:15 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:15 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 844ms (844ms including waiting). Image size: 110771092 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:15 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:15 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:21 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:25 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 4.666s (4.667s including waiting). 
Image size: 73448087 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:26 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:26 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:26 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:26 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 819ms (819ms including waiting). Image size: 73448087 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:26 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:26 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:32 +0000 UTC Warning Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Unhealthy Readiness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:32 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 17.446s (17.446s including waiting). Image size: 451318419 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:32 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:32 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:26:32 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:01 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 28.988s (28.988s including waiting). Image size: 448774388 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:01 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:01 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:01 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:08 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 7.151s (7.151s including waiting). Image size: 138742280 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:08 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:08 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:13 +0000 UTC Warning Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2026/03/23 11:27:13 Waiting for MySQL ready state 2026/03/23 11:27:13 MySQL is ready 2026/03/23 11:27:13 Peers: [3263373934356439.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring 6236343964373335.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring] 2026/03/23 11:27:13 FQDN: demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:27:13 Primary: demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring Replicas: [demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring] 2026/03/23 11:27:13 lookup demand-backup-cloud-mysql-1 [10.74.216.10] 2026/03/23 11:27:13 PodIP: 10.74.216.10 2026/03/23 11:27:13 lookup demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring [10.74.217.4] 2026/03/23 11:27:13 PrimaryIP: 10.74.217.4 2026/03/23 11:27:13 Donor: demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:27:13 Opening connection to 10.74.216.10 2026/03/23 11:27:13 Clone required: true 2026/03/23 11:27:13 Checking if a clone in progress 2026/03/23 11:27:13 Clone in progress: false 2026/03/23 11:27:13 Cloning from demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:27:13 Clone finished. Restarting container... kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:14 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:22 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 827ms (827ms including waiting). Image size: 451318419 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:38 +0000 UTC Warning Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Unhealthy Liveness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:53 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:53 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:53 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 Provisioning External provisioner is provisioning volume for claim "kuttl-test-bright-herring/datadir-demand-backup-cloud-mysql-2" pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:53 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-mysql SuccessfulCreate create Claim datadir-demand-backup-cloud-mysql-2 Pod demand-backup-cloud-mysql-2 in StatefulSet demand-backup-cloud-mysql success statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:53 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-mysql SuccessfulCreate create Pod demand-backup-cloud-mysql-2 in StatefulSet demand-backup-cloud-mysql successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:57 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 ProvisioningSucceeded Successfully provisioned volume pvc-ac66ca02-064b-4c32-ac0b-c99beae29708 pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:27:57 +0000 UTC Normal Pod demand-backup-cloud-mysql-2 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-mysql-2 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:05 +0000 UTC Normal Pod demand-backup-cloud-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-ac66ca02-064b-4c32-ac0b-c99beae29708" attachdetach-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:06 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:06 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 310ms (310ms including waiting). Image size: 110771092 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:06 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:06 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:08 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:09 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 270ms (270ms including waiting). Image size: 451318419 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:09 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:09 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:09 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:19 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-haproxy-1 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-kptm default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:19 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:19 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-haproxy SuccessfulCreate create Pod demand-backup-cloud-haproxy-1 in StatefulSet demand-backup-cloud-haproxy successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:20 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 863ms (863ms including waiting). Image size: 110771092 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:20 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:20 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:22 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:25 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 16.166s (16.166s including waiting). 
Image size: 448774388 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:25 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:25 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:25 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:27 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 5.227s (5.227s including waiting). Image size: 103612925 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:27 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:27 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:27 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:28 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 811ms (811ms including waiting). Image size: 103612925 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:28 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:28 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:33 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 8.272s (8.272s including waiting). Image size: 138742280 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:34 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:34 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:37 +0000 UTC Warning Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2026/03/23 11:28:36 Waiting for MySQL ready state 2026/03/23 11:28:36 MySQL is ready 2026/03/23 11:28:36 Peers: [3263373934356439.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring 3339333063633964.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring 6236343964373335.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring] 2026/03/23 11:28:36 FQDN: demand-backup-cloud-mysql-2.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:28:36 Primary: demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring Replicas: [demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring demand-backup-cloud-mysql-2.demand-backup-cloud-mysql.kuttl-test-bright-herring] 2026/03/23 11:28:36 lookup demand-backup-cloud-mysql-2 [10.74.218.7] 2026/03/23 11:28:36 PodIP: 10.74.218.7 2026/03/23 11:28:36 lookup demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring [10.74.217.4] 2026/03/23 11:28:36 PrimaryIP: 10.74.217.4 2026/03/23 11:28:36 Donor: demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:28:36 Opening connection to 10.74.218.7 2026/03/23 11:28:36 Clone required: true 2026/03/23 11:28:36 Checking if a clone in progress 2026/03/23 11:28:36 Clone in progress: false 2026/03/23 11:28:36 Cloning from demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:28:37 Clone finished. Restarting container... kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:37 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:44 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 1.206s (1.206s including waiting). Image size: 451318419 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:45 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-haproxy-2 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-hkqr default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:45 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-haproxy SuccessfulCreate create Pod demand-backup-cloud-haproxy-2 in StatefulSet demand-backup-cloud-haproxy successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:46 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:47 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 1.196s (1.196s including waiting). Image size: 110771092 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:47 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:47 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:49 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:55 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 5.965s (5.965s including waiting). Image size: 103612925 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:55 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:55 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:55 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:56 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 1.226s (1.226s including waiting). Image size: 103612925 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:56 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:28:56 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:17 +0000 UTC Normal PerconaServerMySQL.ps.percona.com demand-backup-cloud ClusterStateChanged Initializing -> Ready ps-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:30 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:30 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:30 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:31 +0000 UTC Normal PerconaServerMySQL.ps.percona.com demand-backup-cloud ClusterStateChanged Ready -> Initializing ps-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:33 +0000 UTC Warning Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2026/03/23 11:29:33 MySQL state is not ready... kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:38 +0000 UTC Warning Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2026/03/23 11:29:38 MySQL state is not ready... kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:43 +0000 UTC Warning Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:50 +0000 UTC Normal Pod demand-backup-cloud-mysql-0 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-mysql-0 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-kptm default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:54 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:55 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 1.253s (1.253s including waiting). Image size: 110771092 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:55 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:55 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:57 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:58 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 822ms (822ms including waiting). Image size: 451318419 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:58 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:58 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:58 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:59 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 1.215s (1.215s including waiting). Image size: 448774388 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:59 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:59 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:29:59 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:30:00 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 1.126s (1.126s including waiting). Image size: 138742280 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:30:01 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:30:01 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:30:15 +0000 UTC Warning Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Unhealthy Startup probe failed: 2026/03/23 11:30:14 Waiting for MySQL ready state 2026/03/23 11:30:14 MySQL is ready 2026/03/23 11:30:14 Peers: [3263373934356439.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring 3339333063633964.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring 3661343137306565.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring] 2026/03/23 11:30:14 FQDN: demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:30:14 Primary: demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring Replicas: [demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring demand-backup-cloud-mysql-2.demand-backup-cloud-mysql.kuttl-test-bright-herring] 2026/03/23 11:30:14 lookup demand-backup-cloud-mysql-0 [10.74.217.7] 2026/03/23 11:30:14 PodIP: 10.74.217.7 2026/03/23 11:30:14 lookup demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring [10.74.216.10] 2026/03/23 11:30:14 PrimaryIP: 10.74.216.10 2026/03/23 11:30:14 Donor: demand-backup-cloud-mysql-2.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:30:14 Opening connection to 10.74.217.7 2026/03/23 11:30:14 Clone required: true 2026/03/23 11:30:14 Checking if a clone in progress 2026/03/23 11:30:14 Clone in progress: false 2026/03/23 11:30:14 Cloning from demand-backup-cloud-mysql-2.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:30:15 Clone finished. Restarting container... kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:30:15 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:30:19 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 816ms (816ms including waiting). Image size: 451318419 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:30:21 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 827ms (827ms including waiting). Image size: 138742280 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:05 +0000 UTC Normal Pod xb-demand-backup-cloud-s3-aws-s3-t7dnt Binding Scheduled Successfully assigned kuttl-test-bright-herring/xb-demand-backup-cloud-s3-aws-s3-t7dnt to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:05 +0000 UTC Normal Job.batch xb-demand-backup-cloud-s3-aws-s3 SuccessfulCreate Created pod: xb-demand-backup-cloud-s3-aws-s3-t7dnt job-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:06 +0000 UTC Normal Pod xb-demand-backup-cloud-s3-aws-s3-t7dnt.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:06 +0000 UTC Normal Pod xb-demand-backup-cloud-s3-aws-s3-t7dnt.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 344ms (344ms including waiting). Image size: 110771092 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:06 +0000 UTC Normal Pod xb-demand-backup-cloud-s3-aws-s3-t7dnt.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:06 +0000 UTC Normal Pod xb-demand-backup-cloud-s3-aws-s3-t7dnt.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:08 +0000 UTC Normal Pod xb-demand-backup-cloud-s3-aws-s3-t7dnt.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:08 +0000 UTC Normal Pod xb-demand-backup-cloud-s3-aws-s3-t7dnt.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 859ms (859ms including waiting). Image size: 448774388 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:08 +0000 UTC Normal Pod xb-demand-backup-cloud-s3-aws-s3-t7dnt.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:09 +0000 UTC Normal Pod xb-demand-backup-cloud-s3-aws-s3-t7dnt.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:14 +0000 UTC Normal Job.batch xb-demand-backup-cloud-s3-aws-s3 Completed Job completed job-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:36 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:36 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:36 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:36 +0000 UTC Warning Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Unhealthy Readiness probe failed: 2026/03/23 11:31:36 MySQL state is not ready... 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:36 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-mysql SuccessfulDelete delete Pod demand-backup-cloud-mysql-2 in StatefulSet demand-backup-cloud-mysql successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:36 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:36 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:36 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-orc SuccessfulDelete delete Pod demand-backup-cloud-orc-2 in StatefulSet demand-backup-cloud-orc successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:37 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:37 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:37 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:37 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:37 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-haproxy SuccessfulDelete delete Pod demand-backup-cloud-haproxy-2 in StatefulSet demand-backup-cloud-haproxy successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:37 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-haproxy SuccessfulDelete delete Pod demand-backup-cloud-haproxy-1 in StatefulSet demand-backup-cloud-haproxy successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:37 +0000 UTC Normal PerconaServerMySQL.ps.percona.com demand-backup-cloud ClusterStateChanged Ready -> Stopping ps-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:38 +0000 UTC Warning Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Unhealthy Readiness probe failed: Get "http://10.74.217.5:3000/api/health": dial tcp 10.74.217.5:3000: connect: connection refused kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:39 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:39 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:39 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-haproxy SuccessfulDelete delete Pod demand-backup-cloud-haproxy-0 in StatefulSet demand-backup-cloud-haproxy successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:40 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 
11:31:40 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:40 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:40 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-mysql SuccessfulDelete delete Pod demand-backup-cloud-mysql-1 in StatefulSet demand-backup-cloud-mysql successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:41 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:41 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:41 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-orc SuccessfulDelete delete Pod demand-backup-cloud-orc-1 in StatefulSet demand-backup-cloud-orc successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:43 +0000 UTC Warning Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Unhealthy Readiness probe failed: 2026/03/23 11:31:43 MySQL state is not ready... kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:43 +0000 UTC Warning Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Unhealthy Readiness probe failed: Get "http://10.74.216.9:3000/api/health": dial tcp 10.74.216.9:3000: connect: connection refused kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:48 +0000 UTC Warning Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Unhealthy Readiness probe failed: 2026/03/23 11:31:48 MySQL state is not ready... 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:31:53 +0000 UTC Warning Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:00 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-mysql SuccessfulDelete delete Pod demand-backup-cloud-mysql-0 in StatefulSet demand-backup-cloud-mysql successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:02 +0000 UTC Normal PerconaServerMySQL.ps.percona.com demand-backup-cloud ClusterStateChanged Stopping -> Paused ps-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:10 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:10 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:10 +0000 UTC Normal StatefulSet.apps demand-backup-cloud-orc SuccessfulDelete delete Pod demand-backup-cloud-orc-0 in StatefulSet demand-backup-cloud-orc successful statefulset-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:17 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-s3-xhvbq Binding Scheduled Successfully assigned kuttl-test-bright-herring/xb-restore-demand-backup-cloud-restore-s3-xhvbq to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:17 +0000 UTC Normal Job.batch xb-restore-demand-backup-cloud-restore-s3 SuccessfulCreate Created pod: xb-restore-demand-backup-cloud-restore-s3-xhvbq job-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:21 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-s3-xhvbq SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-3051ad2f-7e2d-415a-bacd-8e1f236017e0" attachdetach-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:26 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-s3-xhvbq.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:26 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-s3-xhvbq.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 848ms (848ms including waiting). Image size: 110771092 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:26 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-s3-xhvbq.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:26 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-s3-xhvbq.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:28 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-s3-xhvbq.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:29 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-s3-xhvbq.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 849ms (849ms including waiting). Image size: 448774388 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:29 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-s3-xhvbq.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:29 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-s3-xhvbq.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:37 +0000 UTC Normal Pod demand-backup-cloud-mysql-0 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-mysql-0 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-kptm default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:37 +0000 UTC Normal Job.batch xb-restore-demand-backup-cloud-restore-s3 Completed Job completed job-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:38 +0000 UTC Warning Pod demand-backup-cloud-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-3051ad2f-7e2d-415a-bacd-8e1f236017e0" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:38 +0000 UTC Normal Pod demand-backup-cloud-orc-0 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-orc-0 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:38 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:38 +0000 UTC Normal PerconaServerMySQL.ps.percona.com demand-backup-cloud ClusterStateChanged Paused -> Initializing ps-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:39 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 414ms (414ms including waiting). Image size: 110771092 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:39 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:39 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:41 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:42 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 849ms (849ms including waiting). Image size: 73448087 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:42 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:42 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:42 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:42 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 310ms (310ms including waiting). Image size: 73448087 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:42 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:42 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:53 +0000 UTC Normal Pod demand-backup-cloud-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-3051ad2f-7e2d-415a-bacd-8e1f236017e0" attachdetach-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:55 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:56 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 862ms (862ms including waiting). Image size: 110771092 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:56 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:56 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:58 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:58 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 830ms (830ms including waiting). Image size: 451318419 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:58 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:59 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:59 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:59 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 832ms (832ms including waiting). Image size: 448774388 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:59 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:59 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:32:59 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:00 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 862ms (862ms including waiting). Image size: 138742280 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:00 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:00 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:14 +0000 UTC Normal Pod demand-backup-cloud-orc-1 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-orc-1 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-hkqr default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:14 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:15 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 847ms (847ms including waiting). Image size: 110771092 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:15 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:15 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:17 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:18 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 841ms (841ms including waiting). Image size: 73448087 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:18 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:18 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:18 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:19 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 844ms (844ms including waiting). Image size: 73448087 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:19 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:19 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:30 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:30 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:30 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 Provisioning External provisioner is provisioning volume for claim "kuttl-test-bright-herring/datadir-demand-backup-cloud-mysql-1" pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:30 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-haproxy-0 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:31 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:32 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 856ms (856ms including waiting). Image size: 110771092 bytes. 
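
Note: the WaitForFirstConsumer / ExternalProvisioning pair above is the normal topology-aware flow on GKE: with a StorageClass whose volumeBindingMode is WaitForFirstConsumer, the persistent disk is only provisioned after the pod is scheduled, so the disk lands in that node's zone. A quick way to confirm the binding mode (sketch; "standard-rwo" is GKE's usual default class and an assumption here, the class name is not shown in this log):

    # prints "WaitForFirstConsumer" for topology-aware classes
    kubectl get storageclass standard-rwo -o jsonpath='{.volumeBindingMode}'
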
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:32 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:32 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:34 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 ProvisioningSucceeded Successfully provisioned volume pvc-6d69f130-56d4-463e-b59c-3205a02951f3 pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:34 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:34 +0000 UTC Normal Pod demand-backup-cloud-mysql-1 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-mysql-1 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-hkqr default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:35 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 808ms (808ms including waiting). Image size: 103612925 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:35 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:35 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:35 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:36 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 803ms (803ms including waiting). Image size: 103612925 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:36 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:36 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:42 +0000 UTC Normal Pod demand-backup-cloud-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-6d69f130-56d4-463e-b59c-3205a02951f3" attachdetach-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:43 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:43 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 315ms (315ms including waiting). Image size: 110771092 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:43 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:44 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:45 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:46 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 829ms (829ms including waiting). Image size: 451318419 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:46 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:46 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:46 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:47 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 829ms (829ms including waiting). Image size: 448774388 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:47 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:47 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:47 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 842ms (842ms including waiting). Image size: 138742280 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:50 +0000 UTC Normal Pod demand-backup-cloud-orc-2 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-orc-2 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-kptm default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:51 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:52 +0000 UTC Warning Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Unhealthy Readiness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:52 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 853ms (853ms including waiting). Image size: 110771092 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:52 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:52 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:54 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:55 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 849ms (849ms including waiting). Image size: 73448087 bytes. 
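
Note: ERROR 2013 from the haproxy-0 readiness probe is a transient. The error text comes from a mysql client inside the haproxy container, and "Lost connection ... reading initial communication packet" typically means HAProxy accepted the TCP connection but closed it before the MySQL handshake because no backend was ready yet, which matches the cluster's state at 11:33:52. A manual check along the same lines (sketch; ROOT_PASSWORD and the in-pod listener address are assumptions, the cluster's secret name is not shown in this excerpt):

    # ROOT_PASSWORD: assumed to hold the cluster's root password
    kubectl -n kuttl-test-bright-herring exec demand-backup-cloud-haproxy-0 -c haproxy -- \
        mysql -h127.0.0.1 -P3306 -uroot -p"$ROOT_PASSWORD" -e 'SELECT 1'
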
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:55 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:55 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:55 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:55 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 827ms (827ms including waiting). Image size: 73448087 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:56 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:33:56 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:04 +0000 UTC Warning Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2026/03/23 11:34:03 Waiting for MySQL ready state 2026/03/23 11:34:03 MySQL is ready 2026/03/23 11:34:03 Peers: [3366626163333632.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring 3366643835643931.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring] 2026/03/23 11:34:03 FQDN: demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:34:03 Primary: demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring Replicas: [demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring] 2026/03/23 11:34:03 lookup demand-backup-cloud-mysql-1 [10.74.216.13] 2026/03/23 11:34:03 PodIP: 10.74.216.13 2026/03/23 11:34:03 lookup demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring [10.74.217.8] 2026/03/23 11:34:03 PrimaryIP: 10.74.217.8 2026/03/23 11:34:03 Donor: demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:34:03 Opening connection to 10.74.216.13 2026/03/23 11:34:03 Clone required: true 2026/03/23 11:34:03 Checking if a clone in progress 2026/03/23 11:34:03 Clone in progress: false 2026/03/23 11:34:03 Cloning from demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:34:04 Clone finished. Restarting container... kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:04 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:08 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 852ms (852ms including waiting). Image size: 451318419 bytes. 
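
Note: the startup-probe "failure" above is the expected replica bootstrap, not a fault. The probe output shows the pod deciding "Clone required: true", cloning the dataset from its donor demand-backup-cloud-mysql-0, and then deliberately exiting ("Clone finished. Restarting container..."), so the kubelet's "failed startup probe, will be restarted" is the mechanism by which mysqld comes back up on the cloned data. A clone in flight can be watched through the MySQL clone plugin's status table (sketch; ROOT_PASSWORD is an assumption as above, and performance_schema.clone_status is only populated while the clone plugin is loaded):

    # STATE moves through Not Started / In Progress / Completed / Failed
    kubectl -n kuttl-test-bright-herring exec demand-backup-cloud-mysql-1 -c mysql -- \
        mysql -uroot -p"$ROOT_PASSWORD" -e 'SELECT STATE, ERROR_NO FROM performance_schema.clone_status'
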
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:38 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:39 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:39 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 Provisioning External provisioner is provisioning volume for claim "kuttl-test-bright-herring/datadir-demand-backup-cloud-mysql-2" pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:42 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-haproxy-1 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-kptm default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:43 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 ProvisioningSucceeded Successfully provisioned volume pvc-7cbc76a0-a206-4a24-9b56-b000aa940ec1 pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:43 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:44 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 844ms (844ms including waiting). Image size: 110771092 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:44 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:44 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:44 +0000 UTC Normal Pod demand-backup-cloud-mysql-2 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-mysql-2 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:46 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:47 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 854ms (854ms including waiting). Image size: 103612925 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:47 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:47 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:47 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:48 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 854ms (854ms including waiting). Image size: 103612925 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:48 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:48 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:51 +0000 UTC Normal Pod demand-backup-cloud-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-7cbc76a0-a206-4a24-9b56-b000aa940ec1" attachdetach-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:52 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:53 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 844ms (844ms including waiting). Image size: 110771092 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:53 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:53 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:55 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:56 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 838ms (838ms including waiting). Image size: 451318419 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:56 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:56 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:56 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:56 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 342ms (342ms including waiting). Image size: 448774388 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:56 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:56 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:56 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:57 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 845ms (845ms including waiting). Image size: 138742280 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:57 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:34:57 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:04 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-haproxy-2 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-hkqr default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:04 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:05 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 895ms (895ms including waiting). Image size: 110771092 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:05 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:05 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:07 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:08 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 818ms (818ms including waiting). Image size: 103612925 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:08 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:08 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:08 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:09 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 854ms (854ms including waiting). Image size: 103612925 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:09 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:09 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:13 +0000 UTC Warning Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2026/03/23 11:35:12 Waiting for MySQL ready state 2026/03/23 11:35:12 MySQL is ready 2026/03/23 11:35:12 Peers: [3366626163333632.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring 3366643835643931.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring 6634646236633139.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring] 2026/03/23 11:35:12 FQDN: demand-backup-cloud-mysql-2.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:35:12 Primary: demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring Replicas: [demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring demand-backup-cloud-mysql-2.demand-backup-cloud-mysql.kuttl-test-bright-herring] 2026/03/23 11:35:12 lookup demand-backup-cloud-mysql-2 [10.74.218.12] 2026/03/23 11:35:12 PodIP: 10.74.218.12 2026/03/23 11:35:12 lookup demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring [10.74.217.8] 2026/03/23 11:35:12 PrimaryIP: 10.74.217.8 2026/03/23 11:35:12 Donor: demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:35:12 Opening connection to 10.74.218.12 2026/03/23 11:35:12 Clone required: true 2026/03/23 11:35:12 Checking if a clone in progress 2026/03/23 11:35:12 Clone in progress: false 2026/03/23 11:35:12 Cloning from demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:35:13 Clone finished. Restarting container... kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:13 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:35:17 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 820ms (820ms including waiting). Image size: 451318419 bytes. 
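
Note: mysql-2 runs the identical clone-and-restart bootstrap; per the Donor line above, its donor is demand-backup-cloud-mysql-1 rather than the primary. The bootstrap output printed before the restart belongs to the previous container instance and can be read back with:

    # --previous selects the pre-restart instance of the mysql container
    kubectl -n kuttl-test-bright-herring logs demand-backup-cloud-mysql-2 -c mysql --previous
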
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:07 +0000 UTC Normal Pod xb-demand-backup-cloud-gcp-gcp-cs-gf9wh Binding Scheduled Successfully assigned kuttl-test-bright-herring/xb-demand-backup-cloud-gcp-gcp-cs-gf9wh to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:07 +0000 UTC Normal Job.batch xb-demand-backup-cloud-gcp-gcp-cs SuccessfulCreate Created pod: xb-demand-backup-cloud-gcp-gcp-cs-gf9wh job-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:08 +0000 UTC Normal Pod xb-demand-backup-cloud-gcp-gcp-cs-gf9wh.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:08 +0000 UTC Normal Pod xb-demand-backup-cloud-gcp-gcp-cs-gf9wh.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 306ms (306ms including waiting). Image size: 110771092 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:08 +0000 UTC Normal Pod xb-demand-backup-cloud-gcp-gcp-cs-gf9wh.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:08 +0000 UTC Normal Pod xb-demand-backup-cloud-gcp-gcp-cs-gf9wh.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:10 +0000 UTC Normal Pod xb-demand-backup-cloud-gcp-gcp-cs-gf9wh.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:11 +0000 UTC Normal Pod xb-demand-backup-cloud-gcp-gcp-cs-gf9wh.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 843ms (843ms including waiting). Image size: 448774388 bytes. 
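
Note: the xb-demand-backup-cloud-gcp-gcp-cs job above is the on-demand XtraBackup upload to the GCS storage named "gcp-cs". The manifest that triggers it is not part of this event dump; a minimal sketch of such a backup resource, with the resource name and storageName inferred from the job name (jobs are named xb-<backup>-<storage>) and the field names assumed per the operator's ps.percona.com/v1alpha1 API:

    # sketch only; the actual manifest used by the test is not shown in this log
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLBackup
    metadata:
      name: demand-backup-cloud-gcp
      namespace: kuttl-test-bright-herring
    spec:
      clusterName: demand-backup-cloud
      storageName: gcp-cs

Applying such a manifest (kubectl apply -f <file>) is what produces the job-controller and kubelet events that follow.
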
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:11 +0000 UTC Normal Pod xb-demand-backup-cloud-gcp-gcp-cs-gf9wh.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:11 +0000 UTC Normal Pod xb-demand-backup-cloud-gcp-gcp-cs-gf9wh.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:18 +0000 UTC Normal Job.batch xb-demand-backup-cloud-gcp-gcp-cs Completed Job completed job-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:38 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:38 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:38 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:38 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:38 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:38 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:38 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:39 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:39 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:40 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:40 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:40 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:40 +0000 UTC Warning Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Unhealthy Readiness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:40 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:40 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:41 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:42 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:43 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:43 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:43 +0000 UTC Warning Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Unhealthy Readiness probe failed: 2026/03/23 11:36:43 MySQL state is not ready... kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:47 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:47 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:47 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:50 +0000 UTC Warning Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2026/03/23 11:36:50 MySQL state is not ready... kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:36:55 +0000 UTC Warning Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:09 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-gcp-5htx9 Binding Scheduled Successfully assigned kuttl-test-bright-herring/xb-restore-demand-backup-cloud-restore-gcp-5htx9 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:09 +0000 UTC Warning Pod xb-restore-demand-backup-cloud-restore-gcp-5htx9 FailedAttachVolume Multi-Attach error for volume "pvc-3051ad2f-7e2d-415a-bacd-8e1f236017e0" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:09 +0000 UTC Normal Job.batch xb-restore-demand-backup-cloud-restore-gcp SuccessfulCreate Created pod: xb-restore-demand-backup-cloud-restore-gcp-5htx9 job-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:21 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-gcp-5htx9 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-3051ad2f-7e2d-415a-bacd-8e1f236017e0" attachdetach-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:22 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-gcp-5htx9.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:22 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-gcp-5htx9.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 299ms (299ms including waiting). Image size: 110771092 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:22 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-gcp-5htx9.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:22 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-gcp-5htx9.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:23 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-gcp-5htx9.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:24 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-gcp-5htx9.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 815ms (815ms including waiting). Image size: 448774388 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:24 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-gcp-5htx9.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:24 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-gcp-5htx9.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:33 +0000 UTC Normal Pod demand-backup-cloud-mysql-0 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-mysql-0 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-kptm default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:33 +0000 UTC Warning Pod demand-backup-cloud-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-3051ad2f-7e2d-415a-bacd-8e1f236017e0" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:33 +0000 UTC Normal Pod demand-backup-cloud-orc-0 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-orc-0 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:33 +0000 UTC Normal Job.batch xb-restore-demand-backup-cloud-restore-gcp Completed Job completed job-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:34 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:35 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 854ms (854ms including waiting). Image size: 110771092 bytes.
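
Note on the restore sequence above: once the restore is requested, the operator stops every cluster pod (the Killing burst at 11:36:38-11:36:47), runs the xb-restore-demand-backup-cloud-restore-gcp job against mysql-0's data volume, and only then redeploys the cluster. That is why both the restore job and the rebuilt mysql-0 hit a transient Multi-Attach warning: the ReadWriteOnce disk pvc-3051ad2f-7e2d-415a-bacd-8e1f236017e0 must first detach from the node of its previous consumer, and the attachdetach-controller retries until the attach succeeds (within roughly 12-16 seconds in both cases here). The restore manifest itself is not in this dump; a minimal sketch under the same naming and API assumptions as the backup sketch earlier:

    # sketch only; field names assumed per ps.percona.com/v1alpha1
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLRestore
    metadata:
      name: demand-backup-cloud-restore-gcp
      namespace: kuttl-test-bright-herring
    spec:
      clusterName: demand-backup-cloud
      backupName: demand-backup-cloud-gcp

While a Multi-Attach warning persists, the disk's current attachment can be checked with:

    # VolumeAttachment objects show which node still holds the disk
    kubectl get volumeattachment | grep pvc-3051ad2f-7e2d-415a-bacd-8e1f236017e0
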
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:35 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:35 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:36 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:37 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 271ms (271ms including waiting). Image size: 73448087 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:37 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:37 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:37 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:37 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 318ms (318ms including waiting). Image size: 73448087 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:37 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:37 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:49 +0000 UTC Normal Pod demand-backup-cloud-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-3051ad2f-7e2d-415a-bacd-8e1f236017e0" attachdetach-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:51 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:51 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 855ms (855ms including waiting). Image size: 110771092 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:52 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:52 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:53 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:53 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 298ms (298ms including waiting). Image size: 451318419 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:53 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:54 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:54 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:54 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 279ms (279ms including waiting). Image size: 448774388 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:54 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:54 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:54 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:54 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 271ms (271ms including waiting). Image size: 138742280 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:54 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:37:54 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:09 +0000 UTC Normal Pod demand-backup-cloud-orc-1 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-orc-1 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-hkqr default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:09 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:09 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 287ms (287ms including waiting). Image size: 110771092 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:09 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:09 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:12 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:12 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 838ms (838ms including waiting). Image size: 73448087 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:12 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:12 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:12 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:13 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 827ms (827ms including waiting). Image size: 73448087 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:13 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:13 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:26 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:26 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:26 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 Provisioning External provisioner is provisioning volume for claim "kuttl-test-bright-herring/datadir-demand-backup-cloud-mysql-1" pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:30 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 ProvisioningSucceeded Successfully provisioned volume pvc-c59b48d5-fbd0-45e0-ac35-eda13e3ac2a7 pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:30 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-haproxy-0 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:30 +0000 UTC Normal Pod demand-backup-cloud-mysql-1 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-mysql-1 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-hkqr default-scheduler logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:31 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:31 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 839ms (839ms including waiting). Image size: 110771092 bytes. 
kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:31 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:31 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:34 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:34 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 819ms (819ms including waiting). Image size: 103612925 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:34 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:34 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:34 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:34 +0000 UTC Normal Pod demand-backup-cloud-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-c59b48d5-fbd0-45e0-ac35-eda13e3ac2a7" attachdetach-controller logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:35 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 836ms (836ms including waiting). Image size: 103612925 bytes. kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:35 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:35 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:35 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:36 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 848ms (848ms including waiting). Image size: 110771092 bytes. 
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:36 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:36 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:38 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.4" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:38 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 301ms (301ms including waiting). Image size: 451318419 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:38 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:38 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:38 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:39 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 914ms (914ms including waiting). Image size: 448774388 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:39 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:39 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:39 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:39 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 291ms (291ms including waiting). Image size: 138742280 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:39 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:39 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:45 +0000 UTC Normal Pod demand-backup-cloud-orc-2 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-orc-2 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-kptm default-scheduler
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:45 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:46 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 294ms (294ms including waiting). Image size: 110771092 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:46 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:46 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:47 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:47 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 279ms (279ms including waiting). Image size: 73448087 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:48 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Created Created container: orchestrator kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:48 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Started Started container orchestrator kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:48 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:48 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 832ms (832ms including waiting). Image size: 73448087 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:48 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:48 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:52 +0000 UTC Warning Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Unhealthy Readiness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:56 +0000 UTC Warning Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2026/03/23 11:38:55 Waiting for MySQL ready state 2026/03/23 11:38:55 MySQL is ready 2026/03/23 11:38:55 Peers: [3232303833366337.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring 6262626661616462.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring] 2026/03/23 11:38:55 FQDN: demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:38:55 Primary: demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring Replicas: [demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring] 2026/03/23 11:38:55 lookup demand-backup-cloud-mysql-1 [10.74.216.16] 2026/03/23 11:38:55 PodIP: 10.74.216.16 2026/03/23 11:38:55 lookup demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring [10.74.217.11] 2026/03/23 11:38:55 PrimaryIP: 10.74.217.11 2026/03/23 11:38:55 Donor: demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:38:55 Opening connection to 10.74.216.16 2026/03/23 11:38:55 Clone required: true 2026/03/23 11:38:55 Checking if a clone in progress 2026/03/23 11:38:55 Clone in progress: false 2026/03/23 11:38:55 Cloning from demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:38:56 Clone finished. Restarting container... kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:38:56 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:00 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 293ms (293ms including waiting). Image size: 451318419 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:31 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:31 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
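The startup-probe "failure" for demand-backup-cloud-mysql-1 above is the expected first-boot path rather than an error: the probe output shows the bootstrap logic discovering its peers, resolving the primary, cloning the datadir from the donor, and then deliberately restarting the container ("Clone finished. Restarting container..."). A sketch for confirming the pod settled after that single restart, using only standard kubectl:

# restart count of the mysql container; a count of 1 here matches the post-clone restart
kubectl -n kuttl-test-bright-herring get pod demand-backup-cloud-mysql-1 \
  -o jsonpath='{.status.containerStatuses[?(@.name=="mysql")].restartCount}'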
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:31 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 Provisioning External provisioner is provisioning volume for claim "kuttl-test-bright-herring/datadir-demand-backup-cloud-mysql-2" pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:34 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 ProvisioningSucceeded Successfully provisioned volume pvc-05b482a9-cd92-4301-b594-4932b2aa26df pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:35 +0000 UTC Normal Pod demand-backup-cloud-mysql-2 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-mysql-2 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:37 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-haproxy-1 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-kptm default-scheduler
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:37 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:38 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 333ms (333ms including waiting). Image size: 110771092 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:38 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:38 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:39 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:39 +0000 UTC Normal Pod demand-backup-cloud-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-05b482a9-cd92-4301-b594-4932b2aa26df" attachdetach-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:40 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 269ms (269ms including waiting). Image size: 103612925 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:40 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:40 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:40 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:40 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 263ms (263ms including waiting). Image size: 103612925 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:40 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:40 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:45 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:46 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 288ms (288ms including waiting). Image size: 110771092 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:46 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:46 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.4" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 260ms (260ms including waiting). Image size: 451318419 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:49 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 957ms (957ms including waiting). Image size: 448774388 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:49 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:49 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:49 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:50 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 848ms (848ms including waiting). Image size: 138742280 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:50 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:50 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:57 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-haproxy-2 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-hkqr default-scheduler
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:57 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:57 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 293ms (293ms including waiting). Image size: 110771092 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:57 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:57 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:59 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:59 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 279ms (279ms including waiting). Image size: 103612925 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:59 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:59 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:39:59 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:40:00 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 833ms (833ms including waiting). Image size: 103612925 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:40:00 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:40:00 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:40:06 +0000 UTC Warning Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2026/03/23 11:40:05 Waiting for MySQL ready state 2026/03/23 11:40:05 MySQL is ready 2026/03/23 11:40:05 Peers: [3232303833366337.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring 3365383564656363.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring 6262626661616462.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring] 2026/03/23 11:40:05 FQDN: demand-backup-cloud-mysql-2.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:40:05 Primary: demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring Replicas: [demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring demand-backup-cloud-mysql-2.demand-backup-cloud-mysql.kuttl-test-bright-herring] 2026/03/23 11:40:05 lookup demand-backup-cloud-mysql-2 [10.74.218.17] 2026/03/23 11:40:05 PodIP: 10.74.218.17 2026/03/23 11:40:05 lookup demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring [10.74.217.11] 2026/03/23 11:40:05 PrimaryIP: 10.74.217.11 2026/03/23 11:40:05 Donor: demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:40:05 Opening connection to 10.74.218.17 2026/03/23 11:40:05 Clone required: true 2026/03/23 11:40:05 Checking if a clone in progress 2026/03/23 11:40:05 Clone in progress: false 2026/03/23 11:40:05 Cloning from demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:40:06 Clone finished. Restarting container... kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:40:06 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:40:09 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 819ms (819ms including waiting). Image size: 451318419 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:03 +0000 UTC Normal Pod xb-demand-backup-cloud-azure-azure-blob-7l7ss Binding Scheduled Successfully assigned kuttl-test-bright-herring/xb-demand-backup-cloud-azure-azure-blob-7l7ss to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:03 +0000 UTC Normal Pod xb-demand-backup-cloud-azure-azure-blob-7l7ss.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:03 +0000 UTC Normal Pod xb-demand-backup-cloud-azure-azure-blob-7l7ss.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 335ms (335ms including waiting). Image size: 110771092 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:03 +0000 UTC Normal Pod xb-demand-backup-cloud-azure-azure-blob-7l7ss.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:03 +0000 UTC Normal Pod xb-demand-backup-cloud-azure-azure-blob-7l7ss.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:03 +0000 UTC Normal Job.batch xb-demand-backup-cloud-azure-azure-blob SuccessfulCreate Created pod: xb-demand-backup-cloud-azure-azure-blob-7l7ss job-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:05 +0000 UTC Normal Pod xb-demand-backup-cloud-azure-azure-blob-7l7ss.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:06 +0000 UTC Normal Pod xb-demand-backup-cloud-azure-azure-blob-7l7ss.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 890ms (890ms including waiting). Image size: 448774388 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:06 +0000 UTC Normal Pod xb-demand-backup-cloud-azure-azure-blob-7l7ss.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:06 +0000 UTC Normal Pod xb-demand-backup-cloud-azure-azure-blob-7l7ss.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:12 +0000 UTC Normal Job.batch xb-demand-backup-cloud-azure-azure-blob Completed Job completed job-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:33 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:33 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:33 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:33 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:33 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:33 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:33 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:34 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:34 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
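The xb-demand-backup-cloud-azure-azure-blob Job completing at 11:41:12 marks the on-demand backup to Azure Blob storage as finished; the Killing cascade that follows is the test stopping the cluster pods before the restore is applied. A sketch for inspecting the backup job, assuming its completed pod has not yet been garbage-collected; the PerconaServerMySQLBackup resource name below is an assumption based on this operator's CRDs:

# job status and the xtrabackup log from its pod
kubectl -n kuttl-test-bright-herring get job xb-demand-backup-cloud-azure-azure-blob
kubectl -n kuttl-test-bright-herring logs job/xb-demand-backup-cloud-azure-azure-blob
# if the operator's CRDs are installed, the backup object should report success as well
kubectl -n kuttl-test-bright-herring get perconaservermysqlbackup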
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:34 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:34 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:35 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:35 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:35 +0000 UTC Warning Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Unhealthy Readiness probe failed: 2026/03/23 11:41:35 MySQL state is not ready... kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:35 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:35 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:37 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:37 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:37 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:41 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:41 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:41 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:41:46 +0000 UTC Warning Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:04 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-azure-hpgkr Binding Scheduled Successfully assigned kuttl-test-bright-herring/xb-restore-demand-backup-cloud-restore-azure-hpgkr to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:04 +0000 UTC Warning Pod xb-restore-demand-backup-cloud-restore-azure-hpgkr FailedAttachVolume Multi-Attach error for volume "pvc-3051ad2f-7e2d-415a-bacd-8e1f236017e0" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:04 +0000 UTC Normal Job.batch xb-restore-demand-backup-cloud-restore-azure SuccessfulCreate Created pod: xb-restore-demand-backup-cloud-restore-azure-hpgkr job-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:17 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-azure-hpgkr SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-3051ad2f-7e2d-415a-bacd-8e1f236017e0" attachdetach-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:18 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-azure-hpgkr.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:19 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-azure-hpgkr.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 843ms (843ms including waiting). Image size: 110771092 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:19 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-azure-hpgkr.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:19 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-azure-hpgkr.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:21 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-azure-hpgkr.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:22 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-azure-hpgkr.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 842ms (842ms including waiting). Image size: 448774388 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:22 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-azure-hpgkr.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:22 +0000 UTC Normal Pod xb-restore-demand-backup-cloud-restore-azure-hpgkr.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:31 +0000 UTC Normal Job.batch xb-restore-demand-backup-cloud-restore-azure Completed Job completed job-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:32 +0000 UTC Normal Pod demand-backup-cloud-mysql-0 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-mysql-0 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-kptm default-scheduler
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:32 +0000 UTC Warning Pod demand-backup-cloud-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-3051ad2f-7e2d-415a-bacd-8e1f236017e0" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:32 +0000 UTC Normal Pod demand-backup-cloud-orc-0 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-orc-0 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:32 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:33 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 859ms (859ms including waiting). Image size: 110771092 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:33 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:33 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:35 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:35 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 300ms (300ms including waiting). Image size: 73448087 bytes. kubelet
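The two Multi-Attach warnings above (first for the restore pod, then for demand-backup-cloud-mysql-0) are expected with ReadWriteOnce GCE PD volumes: each consumer was scheduled to a node other than the one the volume was still attached to, so the attach-detach controller has to detach the disk from the old node before attaching it to the new one. The log shows this resolving on its own within seconds (warning at 11:42:04, successful attach at 11:42:17). A sketch for watching that hand-off, using only built-in resources:

# cluster-scoped view of which node each CSI volume is attached to
kubectl get volumeattachments
# surface only the attach failures in this namespace
kubectl -n kuttl-test-bright-herring get events --field-selector reason=FailedAttachVolume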
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:35 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Created Created container: orchestrator kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:35 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Started Started container orchestrator kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:35 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:36 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 269ms (269ms including waiting). Image size: 73448087 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:36 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:36 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:46 +0000 UTC Normal Pod demand-backup-cloud-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-3051ad2f-7e2d-415a-bacd-8e1f236017e0" attachdetach-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:47 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 853ms (853ms including waiting). Image size: 110771092 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:50 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.4" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:50 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 284ms (284ms including waiting). Image size: 451318419 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:50 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:50 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:50 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:51 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 830ms (830ms including waiting). Image size: 448774388 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:51 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:51 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:51 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:51 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 285ms (285ms including waiting). Image size: 138742280 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:51 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:42:51 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:07 +0000 UTC Normal Pod demand-backup-cloud-orc-1 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-orc-1 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-hkqr default-scheduler
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:08 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:08 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 852ms (852ms including waiting). Image size: 110771092 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:08 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:09 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:10 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:11 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 843ms (843ms including waiting). Image size: 73448087 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:11 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Created Created container: orchestrator kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:11 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Started Started container orchestrator kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:11 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:12 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 844ms (844ms including waiting). Image size: 73448087 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:12 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:12 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:22 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:22 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:22 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 Provisioning External provisioner is provisioning volume for claim "kuttl-test-bright-herring/datadir-demand-backup-cloud-mysql-1" pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:25 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-haproxy-0 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:25 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:26 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-1 ProvisioningSucceeded Successfully provisioned volume pvc-be950a6a-f20d-4707-8f81-cb104242a3c9 pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:26 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 849ms (849ms including waiting). Image size: 110771092 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:26 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:26 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:26 +0000 UTC Normal Pod demand-backup-cloud-mysql-1 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-mysql-1 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-hkqr default-scheduler
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:28 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:28 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 264ms (264ms including waiting). Image size: 103612925 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:28 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:28 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:28 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:30 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 1.256s (1.256s including waiting). Image size: 103612925 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:30 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:30 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:33 +0000 UTC Normal Pod demand-backup-cloud-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-be950a6a-f20d-4707-8f81-cb104242a3c9" attachdetach-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:35 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:35 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 292ms (292ms including waiting). Image size: 110771092 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:35 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:35 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:37 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.4" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:37 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 271ms (271ms including waiting). Image size: 451318419 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:37 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:37 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:37 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:38 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 918ms (918ms including waiting). Image size: 448774388 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:38 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:38 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:38 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:39 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 307ms (307ms including waiting). Image size: 138742280 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:39 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:39 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:43 +0000 UTC Normal Pod demand-backup-cloud-orc-2 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-orc-2 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-kptm default-scheduler
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:44 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:45 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 871ms (871ms including waiting). Image size: 110771092 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:45 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:45 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:47 +0000 UTC Warning Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Unhealthy Readiness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:47 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:48 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 1.162s (1.162s including waiting). Image size: 73448087 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:48 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Created Created container: orchestrator kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:48 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Started Started container orchestrator kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:48 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:49 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 1.254s (1.254s including waiting). Image size: 73448087 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:49 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:49 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:55 +0000 UTC Warning Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2026/03/23 11:43:55 Waiting for MySQL ready state 2026/03/23 11:43:55 MySQL is ready 2026/03/23 11:43:55 Peers: [6135363139626465.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring 6439386661386438.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring] 2026/03/23 11:43:55 FQDN: demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:43:55 Primary: demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring Replicas: [demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring] 2026/03/23 11:43:55 lookup demand-backup-cloud-mysql-1 [10.74.216.19] 2026/03/23 11:43:55 PodIP: 10.74.216.19 2026/03/23 11:43:55 lookup demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring [10.74.217.14] 2026/03/23 11:43:55 PrimaryIP: 10.74.217.14 2026/03/23 11:43:55 Donor: demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:43:55 Opening connection to 10.74.216.19 2026/03/23 11:43:55 Clone required: true 2026/03/23 11:43:55 Checking if a clone in progress 2026/03/23 11:43:55 Clone in progress: false 2026/03/23 11:43:55 Cloning from demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring 2026/03/23 11:43:55 Clone finished. Restarting container... kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:43:55 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:00 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 1.294s (1.294s including waiting). Image size: 451318419 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:30 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:30 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:30 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 Provisioning External provisioner is provisioning volume for claim "kuttl-test-bright-herring/datadir-demand-backup-cloud-mysql-2" pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:32 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-haproxy-1 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-kptm default-scheduler
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:33 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:34 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-cloud-mysql-2 ProvisioningSucceeded Successfully provisioned volume pvc-706ecc1b-5b8d-429e-914f-8c37c7aced58 pd.csi.storage.gke.io_gke-1c3e51425d614785944b-9943-2a8b-vm_8d83d1b5-5b24-4c90-ad0d-ce08f1a9d05b
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:34 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 873ms (873ms including waiting). Image size: 110771092 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:34 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:34 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:34 +0000 UTC Normal Pod demand-backup-cloud-mysql-2 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-mysql-2 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-qj6z default-scheduler
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:36 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:37 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 819ms (819ms including waiting). Image size: 103612925 bytes. kubelet
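The WaitForFirstConsumer / Provisioning / ProvisioningSucceeded sequence above is the late-binding PVC flow: the claim stays unbound until the scheduler has placed demand-backup-cloud-mysql-2, so the disk is created in the zone of the chosen node. A minimal sketch of a StorageClass with that binding mode (the name pd-wait-for-consumer is illustrative; the provisioner is the one shown in the events):

    cat <<'EOF' | kubectl apply -f -
    apiVersion: storage.k8s.io/v1
    kind: StorageClass
    metadata:
      name: pd-wait-for-consumer
    provisioner: pd.csi.storage.gke.io
    volumeBindingMode: WaitForFirstConsumer
    EOF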
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:37 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:37 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:37 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:38 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 837ms (837ms including waiting). Image size: 103612925 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:38 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:38 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:42 +0000 UTC Normal Pod demand-backup-cloud-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-706ecc1b-5b8d-429e-914f-8c37c7aced58" attachdetach-controller
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:43 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:44 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 926ms (926ms including waiting). Image size: 110771092 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:44 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:44 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:46 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.4" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:47 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 883ms (883ms including waiting). Image size: 451318419 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:47 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:47 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:47 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.4" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.4" in 833ms (833ms including waiting). Image size: 448774388 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:48 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:49 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 823ms (823ms including waiting). Image size: 138742280 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:49 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:49 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:54 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-bright-herring/demand-backup-cloud-haproxy-2 to gke-jen-ps-1251-a7b88ac1-default-pool-54596903-hkqr default-scheduler
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:54 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:55 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1251-a7b88ac1" in 862ms (862ms including waiting). Image size: 110771092 bytes. kubelet
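The pt-heartbeat sidecar started above writes timestamp rows on the primary so replication lag can be measured by wall clock rather than via Seconds_Behind_Source. A hedged one-shot lag check from inside the sidecar (database and table names are illustrative; the operator's actual heartbeat schema and credentials may differ):

    kubectl -n kuttl-test-bright-herring exec demand-backup-cloud-mysql-2 -c pt-heartbeat -- \
        pt-heartbeat --check --database heartbeat --table heartbeat \
        --host 127.0.0.1 --user root --password "$ROOT_PASSWORD"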
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:55 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:55 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:57 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:58 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 850ms (850ms including waiting). Image size: 103612925 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:58 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:58 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:58 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:59 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 863ms (863ms including waiting). Image size: 103612925 bytes. kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:59 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:44:59 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:45:04 +0000 UTC Warning Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed:
2026/03/23 11:45:03 Waiting for MySQL ready state
2026/03/23 11:45:03 MySQL is ready
2026/03/23 11:45:03 Peers: [6135363139626465.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring 6166316461323638.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring 6439386661386438.demand-backup-cloud-mysql-unready.kuttl-test-bright-herring]
2026/03/23 11:45:03 FQDN: demand-backup-cloud-mysql-2.demand-backup-cloud-mysql.kuttl-test-bright-herring
2026/03/23 11:45:03 Primary: demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring Replicas: [demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring demand-backup-cloud-mysql-2.demand-backup-cloud-mysql.kuttl-test-bright-herring]
2026/03/23 11:45:03 lookup demand-backup-cloud-mysql-2 [10.74.218.22]
2026/03/23 11:45:03 PodIP: 10.74.218.22
2026/03/23 11:45:03 lookup demand-backup-cloud-mysql-0.demand-backup-cloud-mysql.kuttl-test-bright-herring [10.74.217.14]
2026/03/23 11:45:03 PrimaryIP: 10.74.217.14
2026/03/23 11:45:03 Donor: demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring
2026/03/23 11:45:03 Opening connection to 10.74.218.22
2026/03/23 11:45:03 Clone required: true
2026/03/23 11:45:03 Checking if a clone in progress
2026/03/23 11:45:03 Clone in progress: false
2026/03/23 11:45:03 Cloning from demand-backup-cloud-mysql-1.demand-backup-cloud-mysql.kuttl-test-bright-herring
2026/03/23 11:45:04 Clone finished. Restarting container... kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:45:04 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:45:08 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.4" in 854ms (854ms including waiting). Image size: 451318419 bytes. kubelet
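Same bootstrap path for the third pod, this time with demand-backup-cloud-mysql-1 chosen as the donor. While a clone is still in flight, progress is visible from the joining node; a minimal sketch, assuming root credentials are available:

    kubectl -n kuttl-test-bright-herring exec demand-backup-cloud-mysql-2 -c mysql -- \
        mysql -u root -p"$ROOT_PASSWORD" -e \
        'SELECT STATE, BEGIN_TIME, END_TIME FROM performance_schema.clone_status\G'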
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:02 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:02 +0000 UTC Normal Pod demand-backup-cloud-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:02 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:02 +0000 UTC Normal Pod demand-backup-cloud-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:02 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:02 +0000 UTC Normal Pod demand-backup-cloud-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:02 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:02 +0000 UTC Normal Pod demand-backup-cloud-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:02 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:02 +0000 UTC Normal Pod demand-backup-cloud-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:03 +0000 UTC Warning PodDisruptionBudget.policy demand-backup-cloud-haproxy CalculateExpectedPodCountFailed Failed to calculate the number of expected pods: found no controllers for pod "demand-backup-cloud-haproxy-0" controllermanager
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:03 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:03 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:03 +0000 UTC Normal Pod demand-backup-cloud-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:03 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:03 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:03 +0000 UTC Normal Pod demand-backup-cloud-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:03 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
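The CalculateExpectedPodCountFailed warning above is expected during teardown: as the event message itself says, no controller can be found for demand-backup-cloud-haproxy-0, because the controller that owned it (a StatefulSet, judging by the pod naming) is deleted before the pods are gone, so the disruption controller cannot resolve an expected pod count. It is noise, not a failure. To inspect the budget while the cluster is still up:

    kubectl -n kuttl-test-bright-herring describe pdb demand-backup-cloud-haproxy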
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:03 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:03 +0000 UTC Normal Pod demand-backup-cloud-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:03 +0000 UTC Warning Pod demand-backup-cloud-mysql-2.spec.containers{mysql} Unhealthy Readiness probe failed: 2026/03/23 11:46:03 MySQL state is not ready... kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:03 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:03 +0000 UTC Normal Pod demand-backup-cloud-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:05 +0000 UTC Warning Pod demand-backup-cloud-mysql-1.spec.containers{mysql} Unhealthy Readiness probe failed: 2026/03/23 11:46:05 MySQL state is not ready... kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:06 +0000 UTC Warning Pod demand-backup-cloud-orc-2.spec.containers{orchestrator} Unhealthy Readiness probe failed: Get "http://10.74.217.15:3000/api/health": dial tcp 10.74.217.15:3000: connect: connection refused kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:07 +0000 UTC Warning Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2026/03/23 11:46:07 MySQL state is not ready... kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:07 +0000 UTC Warning Pod demand-backup-cloud-orc-0.spec.containers{orchestrator} Unhealthy Readiness probe failed: Get "http://10.74.218.20:3000/api/health": dial tcp 10.74.218.20:3000: connect: connection refused kubelet
logger.go:42: 11:46:16 | demand-backup-cloud | 2026-03-23 11:46:12 +0000 UTC Warning Pod demand-backup-cloud-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2026/03/23 11:46:12 MySQL state is not ready... kubelet
logger.go:42: 11:46:17 | demand-backup-cloud | Deleting namespace "kuttl-test-bright-herring"
=== NAME kuttl
    harness.go:404: run tests finished
    harness.go:511: cleaning up
    harness.go:568: removing temp folder: ""
--- PASS: kuttl (1379.63s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/demand-backup-cloud (1378.83s)
PASS
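For reference, a hedged sketch of re-running only this test against the configured kubeconfig with the kuttl kubectl plugin (flag spellings as in recent kuttl releases; verify with kubectl kuttl test --help before relying on them):

    kubectl kuttl test e2e-tests/tests --test demand-backup-cloud --timeout 180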