=== RUN   kuttl
    harness.go:464: starting setup
    harness.go:255: running tests using configured kubeconfig.
    harness.go:278: Successful connection to cluster at: https://34.66.113.204
    harness.go:363: running tests
    harness.go:75: going to run test suite with timeout of 180 seconds for each step
    harness.go:375: testsuite: e2e-tests/tests has 34 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/recreate
=== PAUSE kuttl/harness/recreate
=== CONT  kuttl/harness/recreate
    logger.go:42: 02:45:15 | recreate | Creating namespace: kuttl-test-able-dragon
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | starting test step 0-deploy-operator
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | running command: [sh -c set -o errexit
        set -o xtrace
        
        source ../../functions
        init_temp_dir # do this only in the first TestStep
        
        deploy_operator
        deploy_non_tls_cluster_secrets
        deploy_tls_cluster_secrets
        deploy_client]
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | + source ../../functions
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ realpath ../../..
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | ++++ pwd
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/tests/recreate
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | ++ test_name=recreate
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/vars.sh
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | ++++ git rev-parse --abbrev-ref HEAD
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export GIT_BRANCH=PR-874
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ GIT_BRANCH=PR-874
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export VERSION=PR-874-59fcfc82
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ VERSION=PR-874-59fcfc82
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ export CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | ++++ which gdate
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-874/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | ++++ which date
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ date=/usr/bin/date
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ oc get projects
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ :
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ kubectl get nodes
    logger.go:42: 02:45:15 | recreate/0-deploy-operator | +++ grep '^minikube'
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | + init_temp_dir
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | + rm -rf /tmp/kuttl/ps/recreate
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | + mkdir -p /tmp/kuttl/ps/recreate
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | + deploy_operator
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | + destroy_operator
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | + kubectl -n ps-operator delete deployment percona-server-mysql-operator --force --grace-period=0
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | Error from server (NotFound): deployments.apps "percona-server-mysql-operator" not found
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | + true
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | + [[ -n ps-operator ]]
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | + kubectl delete namespace ps-operator --force --grace-period=0
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | Error from server (NotFound): namespaces "ps-operator" not found
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | + true
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | + [[ -n ps-operator ]]
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | + create_namespace ps-operator
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | + local namespace=ps-operator
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | + [[ -n '' ]]
    logger.go:42: 02:45:16 | recreate/0-deploy-operator | + kubectl delete namespace ps-operator --ignore-not-found
    logger.go:42: 02:45:17 | recreate/0-deploy-operator | + kubectl wait --for=delete namespace ps-operator
    logger.go:42: 02:45:17 | recreate/0-deploy-operator | + kubectl create namespace ps-operator
    logger.go:42: 02:45:18 | recreate/0-deploy-operator | namespace/ps-operator created
    logger.go:42: 02:45:18 | recreate/0-deploy-operator | + kubectl -n ps-operator apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy/crd.yaml
    logger.go:42: 02:45:18 | recreate/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlbackups.ps.percona.com serverside-applied
    logger.go:42: 02:45:19 | recreate/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlrestores.ps.percona.com serverside-applied
    logger.go:42: 02:45:20 | recreate/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqls.ps.percona.com serverside-applied
    logger.go:42: 02:45:20 | recreate/0-deploy-operator | + '[' -n ps-operator ']'
    logger.go:42: 02:45:20 | recreate/0-deploy-operator | + kubectl -n ps-operator apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy/cw-rbac.yaml
    logger.go:42: 02:45:21 | recreate/0-deploy-operator | serviceaccount/percona-server-mysql-operator created
    logger.go:42: 02:45:21 | recreate/0-deploy-operator | role.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created
    logger.go:42: 02:45:21 | recreate/0-deploy-operator | clusterrole.rbac.authorization.k8s.io/percona-server-mysql-operator unchanged
    logger.go:42: 02:45:21 | recreate/0-deploy-operator | rolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created
    logger.go:42: 02:45:22 | recreate/0-deploy-operator | clusterrolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator unchanged
    logger.go:42: 02:45:22 | recreate/0-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="DISABLE_TELEMETRY").value) = "true"'
    logger.go:42: 02:45:22 | recreate/0-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="LOG_LEVEL").value) = "DEBUG"'
    logger.go:42: 02:45:22 | recreate/0-deploy-operator | + kubectl -n ps-operator apply -f -
    logger.go:42: 02:45:22 | recreate/0-deploy-operator | ++ printf 'select(documentIndex==1).spec.template.spec.containers[0].image="%s"' perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:45:22 | recreate/0-deploy-operator | + yq eval 'select(documentIndex==1).spec.template.spec.containers[0].image="perconalab/percona-server-mysql-operator:PR-874-59fcfc82"' /mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy/cw-operator.yaml
    logger.go:42: 02:45:23 | recreate/0-deploy-operator | configmap/percona-server-mysql-operator-config created
    logger.go:42: 02:45:23 | recreate/0-deploy-operator | deployment.apps/percona-server-mysql-operator created
    logger.go:42: 02:45:23 | recreate/0-deploy-operator | + deploy_non_tls_cluster_secrets
    logger.go:42: 02:45:23 | recreate/0-deploy-operator | + kubectl -n kuttl-test-able-dragon apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf/secrets.yaml
    logger.go:42: 02:45:24 | recreate/0-deploy-operator | secret/test-secrets created
    logger.go:42: 02:45:24 | recreate/0-deploy-operator | + deploy_tls_cluster_secrets
    logger.go:42: 02:45:24 | recreate/0-deploy-operator | + kubectl -n kuttl-test-able-dragon apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf/ssl-secret.yaml
    logger.go:42: 02:45:25 | recreate/0-deploy-operator | secret/test-ssl created
    logger.go:42: 02:45:25 | recreate/0-deploy-operator | + deploy_client
    logger.go:42: 02:45:25 | recreate/0-deploy-operator | + kubectl -n kuttl-test-able-dragon apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf/client.yaml
    logger.go:42: 02:45:26 | recreate/0-deploy-operator | pod/mysql-client created
    logger.go:42: 02:45:26 | recreate/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
    logger.go:42: 02:45:26 | recreate/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
    logger.go:42: 02:45:27 | recreate/0-deploy-operator | ASSERT FAIL Resource(s) not found.
    logger.go:42: 02:45:28 | recreate/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
    logger.go:42: 02:45:28 | recreate/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
    logger.go:42: 02:45:28 | recreate/0-deploy-operator | ASSERT FAIL Resource(s) not found.
    logger.go:42: 02:45:30 | recreate/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
    logger.go:42: 02:45:30 | recreate/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
    logger.go:42: 02:45:30 | recreate/0-deploy-operator | ASSERT FAIL Resource(s) not found.
    logger.go:42: 02:45:31 | recreate/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
    logger.go:42: 02:45:31 | recreate/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
    logger.go:42: 02:45:32 | recreate/0-deploy-operator | ASSERT FAIL Resource(s) not found.
    logger.go:42: 02:45:33 | recreate/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
    logger.go:42: 02:45:33 | recreate/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
    logger.go:42: 02:45:33 | recreate/0-deploy-operator | ASSERT FAIL Resource(s) not found.
    logger.go:42: 02:45:34 | recreate/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
    logger.go:42: 02:45:35 | recreate/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
    logger.go:42: 02:45:35 | recreate/0-deploy-operator | INFO   Found 1 resource(s).
    logger.go:42: 02:45:35 | recreate/0-deploy-operator | NAME                            NAMESPACE     COL0
    logger.go:42: 02:45:35 | recreate/0-deploy-operator | percona-server-mysql-operator   ps-operator   1
    logger.go:42: 02:45:35 | recreate/0-deploy-operator | ASSERT PASS
    logger.go:42: 02:45:35 | recreate/0-deploy-operator | test step completed 0-deploy-operator
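The operator deployment in step 0 reduces to the commands below (a minimal sketch run from the repo root; deploy_operator in e2e-tests/functions wraps these, and the image tag is the PR build visible in the trace above):

        kubectl create namespace ps-operator
        kubectl -n ps-operator apply --server-side --force-conflicts -f deploy/crd.yaml
        kubectl -n ps-operator apply -f deploy/cw-rbac.yaml
        yq eval 'select(documentIndex==1).spec.template.spec.containers[0].image="perconalab/percona-server-mysql-operator:PR-874-59fcfc82"' deploy/cw-operator.yaml \
            | yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="DISABLE_TELEMETRY").value) = "true"' \
            | yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="LOG_LEVEL").value) = "DEBUG"' \
            | kubectl -n ps-operator apply -f -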
    logger.go:42: 02:45:35 | recreate/1-create-cluster | starting test step 1-create-cluster
    logger.go:42: 02:45:35 | recreate/1-create-cluster | running command: [sh -c set -o errexit
        set -o xtrace
        
        source ../../functions
        
        get_cr \
            | yq eval '.spec.mysql.clusterType="async"' - \
            | yq eval '.spec.mysql.size=3' - \
            | yq eval '.spec.proxy.haproxy.enabled=true' - \
            | yq eval '.spec.proxy.haproxy.size=3' - \
            | yq eval '.spec.orchestrator.enabled=true' - \
            | yq eval '.spec.orchestrator.size=3' - \
            | kubectl -n "${NAMESPACE}" apply -f -]
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + source ../../functions
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ realpath ../../..
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++++ pwd
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/tests/recreate
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++ test_name=recreate
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/vars.sh
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++++ git rev-parse --abbrev-ref HEAD
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export GIT_BRANCH=PR-874
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ GIT_BRANCH=PR-874
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export VERSION=PR-874-59fcfc82
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ VERSION=PR-874-59fcfc82
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ export CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++++ which gdate
    logger.go:42: 02:45:35 | recreate/1-create-cluster | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-874/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++++ which date
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ date=/usr/bin/date
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ oc get projects
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ :
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ kubectl get nodes
    logger.go:42: 02:45:35 | recreate/1-create-cluster | +++ grep '^minikube'
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + get_cr
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + local name_suffix=
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval '.spec.mysql.clusterType="async"' -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval .spec.mysql.size=3 -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval .spec.proxy.haproxy.enabled=true -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval .spec.orchestrator.size=3 -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval .spec.proxy.haproxy.size=3 -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval '.spec.sslSecretName="test-ssl"' -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + '[' -n '' ']'
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + kubectl -n kuttl-test-able-dragon apply -f -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++ printf '.spec.proxy.haproxy.image="%s"' perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval '.spec.upgradeOptions.apply="disabled"' -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval .spec.orchestrator.enabled=true -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval '.spec.pmm.image="perconalab/pmm-client:3-dev-latest"' -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval '.spec.mysql.clusterType="async"' -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval '.spec.secretsName="test-secrets"' -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++ printf '.metadata.name="%s"' recreate
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup"' -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval .spec.orchestrator.enabled=true -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval '.metadata.name="recreate"' /mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy/cr.yaml
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++ printf '.spec.initImage="%s"' perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval '.spec.initImage="perconalab/percona-server-mysql-operator:PR-874-59fcfc82"' -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql"' -
    logger.go:42: 02:45:35 | recreate/1-create-cluster | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:45:35 | recreate/1-create-cluster | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router"' -
    logger.go:42: 02:45:36 | recreate/1-create-cluster | perconaservermysql.ps.percona.com/recreate created
    logger.go:42: 02:48:50 | recreate/1-create-cluster | test step completed 1-create-cluster
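The manifest applied in step 1 is deploy/cr.yaml run through a chain of yq overrides; a minimal sketch of the effective pipeline, using the expressions shown in the trace above (the real get_cr also pins the remaining component images the same way):

        yq eval '.metadata.name="recreate"' deploy/cr.yaml \
            | yq eval '.spec.secretsName="test-secrets"' - \
            | yq eval '.spec.sslSecretName="test-ssl"' - \
            | yq eval '.spec.upgradeOptions.apply="disabled"' - \
            | yq eval '.spec.initImage="perconalab/percona-server-mysql-operator:PR-874-59fcfc82"' - \
            | yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql"' - \
            | yq eval '.spec.mysql.clusterType="async"' - \
            | yq eval '.spec.mysql.size=3' - \
            | yq eval '.spec.proxy.haproxy.enabled=true' - \
            | yq eval '.spec.proxy.haproxy.size=3' - \
            | yq eval '.spec.orchestrator.enabled=true' - \
            | yq eval '.spec.orchestrator.size=3' - \
            | kubectl -n "${NAMESPACE}" apply -f -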
    logger.go:42: 02:48:50 | recreate/2-write-data | starting test step 2-write-data
    logger.go:42: 02:48:50 | recreate/2-write-data | running command: [sh -c set -o errexit
        set -o xtrace
        
        source ../../functions
        
        run_mysql \
            "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" \
            "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"
        
        run_mysql \
            "INSERT myDB.myTable (id) VALUES (100500)" \
            "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"
        
        for i in 0 1 2; do
            host=$(get_mysql_headless_fqdn $(get_cluster_name) $i)
            data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${host} -uroot -proot_password")
            kubectl create configmap -n "${NAMESPACE}" 02-write-data-${i} --from-literal=data="${data}"
        done]
    logger.go:42: 02:48:50 | recreate/2-write-data | + source ../../functions
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ realpath ../../..
    logger.go:42: 02:48:50 | recreate/2-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:48:50 | recreate/2-write-data | ++++ pwd
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/tests/recreate
    logger.go:42: 02:48:50 | recreate/2-write-data | ++ test_name=recreate
    logger.go:42: 02:48:50 | recreate/2-write-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/vars.sh
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:48:50 | recreate/2-write-data | ++++ git rev-parse --abbrev-ref HEAD
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export GIT_BRANCH=PR-874
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ GIT_BRANCH=PR-874
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export VERSION=PR-874-59fcfc82
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ VERSION=PR-874-59fcfc82
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ export CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:48:50 | recreate/2-write-data | ++++ which gdate
    logger.go:42: 02:48:50 | recreate/2-write-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-874/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
    logger.go:42: 02:48:50 | recreate/2-write-data | ++++ which date
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ date=/usr/bin/date
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ oc get projects
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ :
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ kubectl get nodes
    logger.go:42: 02:48:50 | recreate/2-write-data | +++ grep '^minikube'
    logger.go:42: 02:48:51 | recreate/2-write-data | +++ get_cluster_name
    logger.go:42: 02:48:51 | recreate/2-write-data | +++ kubectl -n kuttl-test-able-dragon get ps -o 'jsonpath={.items[0].metadata.name}'
    logger.go:42: 02:48:51 | recreate/2-write-data | ++ get_haproxy_svc recreate
    logger.go:42: 02:48:51 | recreate/2-write-data | ++ local cluster=recreate
    logger.go:42: 02:48:51 | recreate/2-write-data | ++ echo recreate-haproxy
    logger.go:42: 02:48:51 | recreate/2-write-data | + run_mysql 'CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)' '-h recreate-haproxy -uroot -proot_password'
    logger.go:42: 02:48:51 | recreate/2-write-data | + local 'command=CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)'
    logger.go:42: 02:48:51 | recreate/2-write-data | + local 'uri=-h recreate-haproxy -uroot -proot_password'
    logger.go:42: 02:48:51 | recreate/2-write-data | + local pod=
    logger.go:42: 02:48:51 | recreate/2-write-data | ++ get_client_pod
    logger.go:42: 02:48:51 | recreate/2-write-data | ++ kubectl -n kuttl-test-able-dragon get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 02:48:51 | recreate/2-write-data | + client_pod=mysql-client
    logger.go:42: 02:48:51 | recreate/2-write-data | + wait_pod mysql-client
    logger.go:42: 02:48:51 | recreate/2-write-data | + local pod=mysql-client
    logger.go:42: 02:48:51 | recreate/2-write-data | + set +o xtrace
    logger.go:42: 02:48:52 | recreate/2-write-data | mysql-clienttrue
    logger.go:42: 02:48:52 | recreate/2-write-data | + sed -e 's/mysql: //'
    logger.go:42: 02:48:52 | recreate/2-write-data | + grep -v 'Using a password on the command line interface can be insecure.'
    logger.go:42: 02:48:52 | recreate/2-write-data | + kubectl -n kuttl-test-able-dragon exec mysql-client -- bash -c 'printf '\''%s\n'\'' "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" | mysql -sN -h recreate-haproxy -uroot -proot_password'
    logger.go:42: 02:48:54 | recreate/2-write-data | + :
    logger.go:42: 02:48:54 | recreate/2-write-data | +++ get_cluster_name
    logger.go:42: 02:48:54 | recreate/2-write-data | +++ kubectl -n kuttl-test-able-dragon get ps -o 'jsonpath={.items[0].metadata.name}'
    logger.go:42: 02:48:54 | recreate/2-write-data | ++ get_haproxy_svc recreate
    logger.go:42: 02:48:54 | recreate/2-write-data | ++ local cluster=recreate
    logger.go:42: 02:48:54 | recreate/2-write-data | ++ echo recreate-haproxy
    logger.go:42: 02:48:54 | recreate/2-write-data | + run_mysql 'INSERT myDB.myTable (id) VALUES (100500)' '-h recreate-haproxy -uroot -proot_password'
    logger.go:42: 02:48:54 | recreate/2-write-data | + local 'command=INSERT myDB.myTable (id) VALUES (100500)'
    logger.go:42: 02:48:54 | recreate/2-write-data | + local 'uri=-h recreate-haproxy -uroot -proot_password'
    logger.go:42: 02:48:54 | recreate/2-write-data | + local pod=
    logger.go:42: 02:48:54 | recreate/2-write-data | ++ get_client_pod
    logger.go:42: 02:48:54 | recreate/2-write-data | ++ kubectl -n kuttl-test-able-dragon get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 02:48:54 | recreate/2-write-data | + client_pod=mysql-client
    logger.go:42: 02:48:54 | recreate/2-write-data | + wait_pod mysql-client
    logger.go:42: 02:48:54 | recreate/2-write-data | + local pod=mysql-client
    logger.go:42: 02:48:54 | recreate/2-write-data | + set +o xtrace
    logger.go:42: 02:48:55 | recreate/2-write-data | mysql-clienttrue
    logger.go:42: 02:48:55 | recreate/2-write-data | + sed -e 's/mysql: //'
    logger.go:42: 02:48:55 | recreate/2-write-data | + grep -v 'Using a password on the command line interface can be insecure.'
    logger.go:42: 02:48:55 | recreate/2-write-data | + kubectl -n kuttl-test-able-dragon exec mysql-client -- bash -c 'printf '\''%s\n'\'' "INSERT myDB.myTable (id) VALUES (100500)" | mysql -sN -h recreate-haproxy -uroot -proot_password'
    logger.go:42: 02:48:56 | recreate/2-write-data | + :
    logger.go:42: 02:48:56 | recreate/2-write-data | + for i in 0 1 2
    logger.go:42: 02:48:56 | recreate/2-write-data | +++ get_cluster_name
    logger.go:42: 02:48:56 | recreate/2-write-data | +++ kubectl -n kuttl-test-able-dragon get ps -o 'jsonpath={.items[0].metadata.name}'
    logger.go:42: 02:48:57 | recreate/2-write-data | ++ get_mysql_headless_fqdn recreate 0
    logger.go:42: 02:48:57 | recreate/2-write-data | ++ local cluster=recreate
    logger.go:42: 02:48:57 | recreate/2-write-data | ++ local index=0
    logger.go:42: 02:48:57 | recreate/2-write-data | ++ echo recreate-mysql-0.recreate-mysql
    logger.go:42: 02:48:57 | recreate/2-write-data | + host=recreate-mysql-0.recreate-mysql
    logger.go:42: 02:48:57 | recreate/2-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-0.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:48:57 | recreate/2-write-data | ++ local 'command=SELECT * FROM myDB.myTable'
    logger.go:42: 02:48:57 | recreate/2-write-data | ++ local 'uri=-h recreate-mysql-0.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:48:57 | recreate/2-write-data | ++ local pod=
    logger.go:42: 02:48:57 | recreate/2-write-data | +++ get_client_pod
    logger.go:42: 02:48:57 | recreate/2-write-data | +++ kubectl -n kuttl-test-able-dragon get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 02:48:57 | recreate/2-write-data | ++ client_pod=mysql-client
    logger.go:42: 02:48:57 | recreate/2-write-data | ++ wait_pod mysql-client
    logger.go:42: 02:48:57 | recreate/2-write-data | ++ local pod=mysql-client
    logger.go:42: 02:48:57 | recreate/2-write-data | ++ set +o xtrace
    logger.go:42: 02:48:57 | recreate/2-write-data | mysql-clienttrue
    logger.go:42: 02:48:57 | recreate/2-write-data | ++ kubectl -n kuttl-test-able-dragon exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-0.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:48:57 | recreate/2-write-data | ++ sed -e 's/mysql: //'
    logger.go:42: 02:48:57 | recreate/2-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
    logger.go:42: 02:48:59 | recreate/2-write-data | + data=100500
    logger.go:42: 02:48:59 | recreate/2-write-data | + kubectl create configmap -n kuttl-test-able-dragon 02-write-data-0 --from-literal=data=100500
    logger.go:42: 02:48:59 | recreate/2-write-data | configmap/02-write-data-0 created
    logger.go:42: 02:48:59 | recreate/2-write-data | + for i in 0 1 2
    logger.go:42: 02:48:59 | recreate/2-write-data | +++ get_cluster_name
    logger.go:42: 02:48:59 | recreate/2-write-data | +++ kubectl -n kuttl-test-able-dragon get ps -o 'jsonpath={.items[0].metadata.name}'
    logger.go:42: 02:49:00 | recreate/2-write-data | ++ get_mysql_headless_fqdn recreate 1
    logger.go:42: 02:49:00 | recreate/2-write-data | ++ local cluster=recreate
    logger.go:42: 02:49:00 | recreate/2-write-data | ++ local index=1
    logger.go:42: 02:49:00 | recreate/2-write-data | ++ echo recreate-mysql-1.recreate-mysql
    logger.go:42: 02:49:00 | recreate/2-write-data | + host=recreate-mysql-1.recreate-mysql
    logger.go:42: 02:49:00 | recreate/2-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-1.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:49:00 | recreate/2-write-data | ++ local 'command=SELECT * FROM myDB.myTable'
    logger.go:42: 02:49:00 | recreate/2-write-data | ++ local 'uri=-h recreate-mysql-1.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:49:00 | recreate/2-write-data | ++ local pod=
    logger.go:42: 02:49:00 | recreate/2-write-data | +++ get_client_pod
    logger.go:42: 02:49:00 | recreate/2-write-data | +++ kubectl -n kuttl-test-able-dragon get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 02:49:00 | recreate/2-write-data | ++ client_pod=mysql-client
    logger.go:42: 02:49:00 | recreate/2-write-data | ++ wait_pod mysql-client
    logger.go:42: 02:49:00 | recreate/2-write-data | ++ local pod=mysql-client
    logger.go:42: 02:49:00 | recreate/2-write-data | ++ set +o xtrace
    logger.go:42: 02:49:00 | recreate/2-write-data | mysql-clienttrue
    logger.go:42: 02:49:00 | recreate/2-write-data | ++ kubectl -n kuttl-test-able-dragon exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-1.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:49:00 | recreate/2-write-data | ++ sed -e 's/mysql: //'
    logger.go:42: 02:49:00 | recreate/2-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
    logger.go:42: 02:49:02 | recreate/2-write-data | + data=100500
    logger.go:42: 02:49:02 | recreate/2-write-data | + kubectl create configmap -n kuttl-test-able-dragon 02-write-data-1 --from-literal=data=100500
    logger.go:42: 02:49:02 | recreate/2-write-data | configmap/02-write-data-1 created
    logger.go:42: 02:49:02 | recreate/2-write-data | + for i in 0 1 2
    logger.go:42: 02:49:02 | recreate/2-write-data | +++ get_cluster_name
    logger.go:42: 02:49:02 | recreate/2-write-data | +++ kubectl -n kuttl-test-able-dragon get ps -o 'jsonpath={.items[0].metadata.name}'
    logger.go:42: 02:49:03 | recreate/2-write-data | ++ get_mysql_headless_fqdn recreate 2
    logger.go:42: 02:49:03 | recreate/2-write-data | ++ local cluster=recreate
    logger.go:42: 02:49:03 | recreate/2-write-data | ++ local index=2
    logger.go:42: 02:49:03 | recreate/2-write-data | ++ echo recreate-mysql-2.recreate-mysql
    logger.go:42: 02:49:03 | recreate/2-write-data | + host=recreate-mysql-2.recreate-mysql
    logger.go:42: 02:49:03 | recreate/2-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-2.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:49:03 | recreate/2-write-data | ++ local 'command=SELECT * FROM myDB.myTable'
    logger.go:42: 02:49:03 | recreate/2-write-data | ++ local 'uri=-h recreate-mysql-2.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:49:03 | recreate/2-write-data | ++ local pod=
    logger.go:42: 02:49:03 | recreate/2-write-data | +++ get_client_pod
    logger.go:42: 02:49:03 | recreate/2-write-data | +++ kubectl -n kuttl-test-able-dragon get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 02:49:03 | recreate/2-write-data | ++ client_pod=mysql-client
    logger.go:42: 02:49:03 | recreate/2-write-data | ++ wait_pod mysql-client
    logger.go:42: 02:49:03 | recreate/2-write-data | ++ local pod=mysql-client
    logger.go:42: 02:49:03 | recreate/2-write-data | ++ set +o xtrace
    logger.go:42: 02:49:03 | recreate/2-write-data | mysql-clienttrue
    logger.go:42: 02:49:03 | recreate/2-write-data | ++ kubectl -n kuttl-test-able-dragon exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-2.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:49:03 | recreate/2-write-data | ++ sed -e 's/mysql: //'
    logger.go:42: 02:49:03 | recreate/2-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
    logger.go:42: 02:49:05 | recreate/2-write-data | + data=100500
    logger.go:42: 02:49:05 | recreate/2-write-data | + kubectl create configmap -n kuttl-test-able-dragon 02-write-data-2 --from-literal=data=100500
    logger.go:42: 02:49:05 | recreate/2-write-data | configmap/02-write-data-2 created
[controller-runtime] log.SetLogger(...) was never called; logs will not be displayed.
Detected at:
	>  goroutine 25 [running]:
	>  runtime/debug.Stack()
	>  	/nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/runtime/debug/stack.go:24 +0x5e
	>  sigs.k8s.io/controller-runtime/pkg/log.eventuallyFulfillRoot()
	>  	/home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/log/log.go:60 +0xcd
	>  sigs.k8s.io/controller-runtime/pkg/log.(*delegatingLogSink).WithName(0xc0002a9c00, {0x184a055, 0x14})
	>  	/home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/log/deleg.go:147 +0x3e
	>  github.com/go-logr/logr.Logger.WithName({{0x1acb7d8, 0xc0002a9c00}, 0x0}, {0x184a055?, 0xc000157f80?})
	>  	/home/mowsiany/go/pkg/mod/github.com/go-logr/logr@v1.2.4/logr.go:336 +0x36
	>  sigs.k8s.io/controller-runtime/pkg/client.newClient(0x131ead3?, {0x0, 0xc00043caf0, {0x1accd90, 0xc0002a8040}, 0x0, {0x0, 0x0}, 0x0})
	>  	/home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/client/client.go:122 +0xf1
	>  sigs.k8s.io/controller-runtime/pkg/client.New(0xc00001cd88?, {0x0, 0xc00043caf0, {0x1accd90, 0xc0002a8040}, 0x0, {0x0, 0x0}, 0x0})
	>  	/home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/client/client.go:103 +0x7d
	>  github.com/kudobuilder/kuttl/pkg/test/utils.NewRetryClient(0xc00001cd88, {0x0, 0xc00043caf0, {0x1accd90, 0xc0002a8040}, 0x0, {0x0, 0x0}, 0x0})
	>  	/home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/utils/kubernetes.go:177 +0x127
	>  github.com/kudobuilder/kuttl/pkg/test.(*Harness).Client(0xc000359208, 0x81?)
	>  	/home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/harness.go:323 +0x18e
	>  github.com/kudobuilder/kuttl/pkg/test.(*Step).Create(0xc00071a680, 0xc0005844e0, {0xc00012de00, 0x16})
	>  	/home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/step.go:177 +0x63
	>  github.com/kudobuilder/kuttl/pkg/test.(*Step).Run(0xc00071a680, 0xc0005844e0, {0xc00012de00, 0x16})
	>  	/home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/step.go:457 +0x24a
	>  github.com/kudobuilder/kuttl/pkg/test.(*Case).Run(0xc000435720, 0xc0005844e0, 0xc000369710)
	>  	/home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/case.go:373 +0xaeb
	>  github.com/kudobuilder/kuttl/pkg/test.(*Harness).RunTests.func1.1(0xc0005844e0)
	>  	/home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/harness.go:401 +0x12e
	>  testing.tRunner(0xc0005844e0, 0xc000011b48)
	>  	/nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/testing/testing.go:1689 +0xfb
	>  created by testing.(*T).Run in goroutine 24
	>  	/nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/testing/testing.go:1742 +0x390
    logger.go:42: 02:49:06 | recreate/2-write-data | test step completed 2-write-data
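Each run_mysql call in step 2 pipes the statement through the mysql-client pod; a minimal standalone equivalent, assuming the same namespace and the credentials shown in the trace:

        kubectl -n "${NAMESPACE}" exec mysql-client -- bash -c \
            'printf "%s\n" "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-0.recreate-mysql -uroot -proot_password' \
            | sed -e 's/mysql: //' \
            | grep -v 'Using a password on the command line interface can be insecure.'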
    logger.go:42: 02:49:06 | recreate/3-pause | starting test step 3-pause
    logger.go:42: 02:49:06 | recreate/3-pause | running command: [sh -c set -o errexit
        set -o xtrace
        
        source ../../functions
        
        get_cr \
            | yq eval '.spec.pause=true' - \
            | yq eval '.spec.mysql.clusterType="async"' - \
            | yq eval '.spec.mysql.size=3' - \
            | yq eval '.spec.proxy.haproxy.enabled=true' - \
            | yq eval '.spec.proxy.haproxy.size=3' - \
            | yq eval '.spec.orchestrator.enabled=true' - \
            | yq eval '.spec.orchestrator.size=3' - \
            | kubectl -n "${NAMESPACE}" apply -f -]
    logger.go:42: 02:49:06 | recreate/3-pause | + source ../../functions
    logger.go:42: 02:49:06 | recreate/3-pause | +++ realpath ../../..
    logger.go:42: 02:49:06 | recreate/3-pause | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:49:06 | recreate/3-pause | ++++ pwd
    logger.go:42: 02:49:06 | recreate/3-pause | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/tests/recreate
    logger.go:42: 02:49:06 | recreate/3-pause | ++ test_name=recreate
    logger.go:42: 02:49:06 | recreate/3-pause | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/vars.sh
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:49:06 | recreate/3-pause | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:49:06 | recreate/3-pause | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:49:06 | recreate/3-pause | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:49:06 | recreate/3-pause | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:49:06 | recreate/3-pause | +++ TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:49:06 | recreate/3-pause | ++++ git rev-parse --abbrev-ref HEAD
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export GIT_BRANCH=PR-874
    logger.go:42: 02:49:06 | recreate/3-pause | +++ GIT_BRANCH=PR-874
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export VERSION=PR-874-59fcfc82
    logger.go:42: 02:49:06 | recreate/3-pause | +++ VERSION=PR-874-59fcfc82
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:49:06 | recreate/3-pause | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:49:06 | recreate/3-pause | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:49:06 | recreate/3-pause | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:49:06 | recreate/3-pause | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:49:06 | recreate/3-pause | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:49:06 | recreate/3-pause | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:49:06 | recreate/3-pause | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:49:06 | recreate/3-pause | +++ PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:49:06 | recreate/3-pause | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:49:06 | recreate/3-pause | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:49:06 | recreate/3-pause | +++ export CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:49:06 | recreate/3-pause | +++ CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:49:06 | recreate/3-pause | ++++ which gdate
    logger.go:42: 02:49:06 | recreate/3-pause | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-874/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
    logger.go:42: 02:49:06 | recreate/3-pause | ++++ which date
    logger.go:42: 02:49:06 | recreate/3-pause | +++ date=/usr/bin/date
    logger.go:42: 02:49:06 | recreate/3-pause | +++ oc get projects
    logger.go:42: 02:49:06 | recreate/3-pause | +++ :
    logger.go:42: 02:49:06 | recreate/3-pause | +++ kubectl get nodes
    logger.go:42: 02:49:06 | recreate/3-pause | +++ grep '^minikube'
    logger.go:42: 02:49:06 | recreate/3-pause | + get_cr
    logger.go:42: 02:49:06 | recreate/3-pause | + local name_suffix=
    logger.go:42: 02:49:06 | recreate/3-pause | + yq eval '.spec.mysql.clusterType="async"' -
    logger.go:42: 02:49:06 | recreate/3-pause | + kubectl -n kuttl-test-able-dragon apply -f -
    logger.go:42: 02:49:06 | recreate/3-pause | + yq eval .spec.orchestrator.size=3 -
    logger.go:42: 02:49:06 | recreate/3-pause | + yq eval .spec.orchestrator.enabled=true -
    logger.go:42: 02:49:06 | recreate/3-pause | + yq eval .spec.proxy.haproxy.size=3 -
    logger.go:42: 02:49:06 | recreate/3-pause | + yq eval .spec.mysql.size=3 -
    logger.go:42: 02:49:06 | recreate/3-pause | + yq eval .spec.proxy.haproxy.enabled=true -
    logger.go:42: 02:49:06 | recreate/3-pause | + yq eval '.spec.secretsName="test-secrets"' -
    logger.go:42: 02:49:06 | recreate/3-pause | + yq eval .spec.pause=true -
    logger.go:42: 02:49:06 | recreate/3-pause | + yq eval '.spec.sslSecretName="test-ssl"' -
    logger.go:42: 02:49:06 | recreate/3-pause | ++ printf '.metadata.name="%s"' recreate
    logger.go:42: 02:49:06 | recreate/3-pause | + yq eval '.spec.upgradeOptions.apply="disabled"' -
    logger.go:42: 02:49:06 | recreate/3-pause | + '[' -n '' ']'
    logger.go:42: 02:49:06 | recreate/3-pause | + yq eval '.metadata.name="recreate"' /mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy/cr.yaml
    logger.go:42: 02:49:06 | recreate/3-pause | + yq eval -
    logger.go:42: 02:49:06 | recreate/3-pause | + yq eval '.spec.mysql.clusterType="async"' -
    logger.go:42: 02:49:06 | recreate/3-pause | + yq eval .spec.orchestrator.enabled=true -
    logger.go:42: 02:49:07 | recreate/3-pause | ++ printf '.spec.initImage="%s"' perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:49:07 | recreate/3-pause | + yq eval '.spec.initImage="perconalab/percona-server-mysql-operator:PR-874-59fcfc82"' -
    logger.go:42: 02:49:07 | recreate/3-pause | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:49:07 | recreate/3-pause | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' -
    logger.go:42: 02:49:07 | recreate/3-pause | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:49:07 | recreate/3-pause | + yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' -
    logger.go:42: 02:49:07 | recreate/3-pause | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:49:07 | recreate/3-pause | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql"' -
    logger.go:42: 02:49:07 | recreate/3-pause | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:49:07 | recreate/3-pause | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup"' -
    logger.go:42: 02:49:07 | recreate/3-pause | ++ printf '.spec.proxy.haproxy.image="%s"' perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:49:07 | recreate/3-pause | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:49:07 | recreate/3-pause | + yq eval '.spec.pmm.image="perconalab/pmm-client:3-dev-latest"' -
    logger.go:42: 02:49:07 | recreate/3-pause | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:49:07 | recreate/3-pause | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router"' -
    logger.go:42: 02:49:07 | recreate/3-pause | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' -
    logger.go:42: 02:49:12 | recreate/3-pause | perconaservermysql.ps.percona.com/recreate configured
    logger.go:42: 02:50:46 | recreate/3-pause | test step completed 3-pause
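Step 3-pause renders the CR with get_cr, forces .spec.pause=true through the yq pipeline, and applies the result. The scattered ordering of the "+ yq eval ..." lines above is expected: each stage of the pipeline runs in its own process under set -o xtrace, so trace lines interleave nondeterministically even though the data flows left to right. As an illustrative equivalent (a sketch, not something the test runs), the same pause could be toggled with a merge patch against the live object:

    # Hypothetical one-liner with the same effect as the pipeline above;
    # "ps" is the short resource name for perconaservermysql used
    # elsewhere in this log.
    kubectl -n kuttl-test-able-dragon patch ps recreate \
        --type=merge -p '{"spec":{"pause":true}}'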
    logger.go:42: 02:50:46 | recreate/4-unpause | starting test step 4-unpause
    logger.go:42: 02:50:46 | recreate/4-unpause | running command: [sh -c set -o errexit
        set -o xtrace
        
        source ../../functions
        
        get_cr \
            | yq eval '.spec.pause=false' - \
            | yq eval '.spec.mysql.clusterType="async"' - \
            | yq eval '.spec.mysql.size=3' - \
            | yq eval '.spec.proxy.haproxy.enabled=true' - \
            | yq eval '.spec.proxy.haproxy.size=3' - \
            | yq eval '.spec.orchestrator.enabled=true' - \
            | yq eval '.spec.orchestrator.size=3' - \
            | kubectl -n "${NAMESPACE}" apply -f -]
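The unpause step reuses get_cr and simply flips .spec.pause back to false before applying. get_cr itself is defined in e2e-tests/functions; reconstructed from the xtrace output that follows, it looks roughly like the sketch below. This is an approximation only: the real helper also sets the orchestrator, HAProxy, router, backup, toolkit and PMM images, and handles a non-empty name suffix.

    # Approximate reconstruction of get_cr from this log's trace.
    get_cr() {
        local name_suffix=${1:-}
        # Render deploy/cr.yaml with the test's name and overrides.
        yq eval "$(printf '.metadata.name="%s"' "${test_name}")" "${DEPLOY_DIR}/cr.yaml" \
            | yq eval '.spec.secretsName="test-secrets"' - \
            | yq eval '.spec.sslSecretName="test-ssl"' - \
            | yq eval '.spec.upgradeOptions.apply="disabled"' - \
            | yq eval "$(printf '.spec.initImage="%s"' "${IMAGE}")" - \
            | yq eval "$(printf '.spec.mysql.image="%s"' "${IMAGE_MYSQL}")" -
    }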
    logger.go:42: 02:50:46 | recreate/4-unpause | + source ../../functions
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ realpath ../../..
    logger.go:42: 02:50:46 | recreate/4-unpause | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:50:46 | recreate/4-unpause | ++++ pwd
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/tests/recreate
    logger.go:42: 02:50:46 | recreate/4-unpause | ++ test_name=recreate
    logger.go:42: 02:50:46 | recreate/4-unpause | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/vars.sh
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:50:46 | recreate/4-unpause | ++++ git rev-parse --abbrev-ref HEAD
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export GIT_BRANCH=PR-874
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ GIT_BRANCH=PR-874
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export VERSION=PR-874-59fcfc82
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ VERSION=PR-874-59fcfc82
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ export CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:50:46 | recreate/4-unpause | ++++ which gdate
    logger.go:42: 02:50:46 | recreate/4-unpause | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-874/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
    logger.go:42: 02:50:46 | recreate/4-unpause | ++++ which date
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ date=/usr/bin/date
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ oc get projects
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ :
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ kubectl get nodes
    logger.go:42: 02:50:46 | recreate/4-unpause | +++ grep '^minikube'
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval .spec.pause=false -
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval '.spec.mysql.clusterType="async"' -
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval .spec.proxy.haproxy.enabled=true -
    logger.go:42: 02:50:46 | recreate/4-unpause | + get_cr
    logger.go:42: 02:50:46 | recreate/4-unpause | + local name_suffix=
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval .spec.mysql.size=3 -
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval '.spec.secretsName="test-secrets"' -
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval .spec.orchestrator.enabled=true -
    logger.go:42: 02:50:46 | recreate/4-unpause | + kubectl -n kuttl-test-able-dragon apply -f -
    logger.go:42: 02:50:46 | recreate/4-unpause | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval '.spec.pmm.image="perconalab/pmm-client:3-dev-latest"' -
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval .spec.proxy.haproxy.size=3 -
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval '.spec.sslSecretName="test-ssl"' -
    logger.go:42: 02:50:46 | recreate/4-unpause | ++ printf '.metadata.name="%s"' recreate
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval '.metadata.name="recreate"' /mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy/cr.yaml
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval .spec.orchestrator.enabled=true -
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval .spec.orchestrator.size=3 -
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval '.spec.upgradeOptions.apply="disabled"' -
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval '.spec.mysql.clusterType="async"' -
    logger.go:42: 02:50:46 | recreate/4-unpause | + '[' -n '' ']'
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval -
    logger.go:42: 02:50:46 | recreate/4-unpause | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:50:46 | recreate/4-unpause | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:50:46 | recreate/4-unpause | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql"' -
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' -
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup"' -
    logger.go:42: 02:50:46 | recreate/4-unpause | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:50:46 | recreate/4-unpause | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:50:46 | recreate/4-unpause | ++ printf '.spec.proxy.haproxy.image="%s"' perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:50:46 | recreate/4-unpause | ++ printf '.spec.initImage="%s"' perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' -
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' -
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval '.spec.initImage="perconalab/percona-server-mysql-operator:PR-874-59fcfc82"' -
    logger.go:42: 02:50:46 | recreate/4-unpause | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router"' -
    logger.go:42: 02:50:48 | recreate/4-unpause | perconaservermysql.ps.percona.com/recreate configured
    logger.go:42: 02:53:48 | recreate/4-unpause | test step completed 4-unpause
    logger.go:42: 02:53:48 | recreate/5-write-data | starting test step 5-write-data
    logger.go:42: 02:53:48 | recreate/5-write-data | running command: [sh -c set -o errexit
        set -o xtrace
        
        source ../../functions
        
        run_mysql \
            "INSERT myDB.myTable (id) VALUES (100501)" \
            "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"
        
        for i in 0 1 2; do
            host=$(get_mysql_headless_fqdn $(get_cluster_name) $i)
            data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${host} -uroot -proot_password")
            kubectl create configmap -n "${NAMESPACE}" 06-write-data-${i} --from-literal=data="${data}"
        done]
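This step writes one more row (100501) through the HAProxy service, then reads the full table back from each of the three mysql pods via their headless-service FQDNs and snapshots the result into configmaps 06-write-data-0..2, presumably for the following assert step to compare. A minimal manual spot-check along the same lines (illustrative only; host and pod names are taken from this log):

    # Read the table directly from one replica, as the test does.
    kubectl -n kuttl-test-able-dragon exec mysql-client -- \
        mysql -sN -uroot -proot_password \
        -h recreate-mysql-0.recreate-mysql \
        -e 'SELECT * FROM myDB.myTable'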
    logger.go:42: 02:53:48 | recreate/5-write-data | + source ../../functions
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ realpath ../../..
    logger.go:42: 02:53:48 | recreate/5-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:53:48 | recreate/5-write-data | ++++ pwd
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/tests/recreate
    logger.go:42: 02:53:48 | recreate/5-write-data | ++ test_name=recreate
    logger.go:42: 02:53:48 | recreate/5-write-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/vars.sh
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:53:48 | recreate/5-write-data | ++++ git rev-parse --abbrev-ref HEAD
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export GIT_BRANCH=PR-874
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ GIT_BRANCH=PR-874
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export VERSION=PR-874-59fcfc82
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ VERSION=PR-874-59fcfc82
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ export CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:53:48 | recreate/5-write-data | ++++ which gdate
    logger.go:42: 02:53:48 | recreate/5-write-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-874/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
    logger.go:42: 02:53:48 | recreate/5-write-data | ++++ which date
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ date=/usr/bin/date
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ oc get projects
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ :
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ kubectl get nodes
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ grep '^minikube'
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ get_cluster_name
    logger.go:42: 02:53:48 | recreate/5-write-data | +++ kubectl -n kuttl-test-able-dragon get ps -o 'jsonpath={.items[0].metadata.name}'
    logger.go:42: 02:53:48 | recreate/5-write-data | ++ get_haproxy_svc recreate
    logger.go:42: 02:53:48 | recreate/5-write-data | ++ local cluster=recreate
    logger.go:42: 02:53:48 | recreate/5-write-data | ++ echo recreate-haproxy
    logger.go:42: 02:53:48 | recreate/5-write-data | + run_mysql 'INSERT myDB.myTable (id) VALUES (100501)' '-h recreate-haproxy -uroot -proot_password'
    logger.go:42: 02:53:48 | recreate/5-write-data | + local 'command=INSERT myDB.myTable (id) VALUES (100501)'
    logger.go:42: 02:53:48 | recreate/5-write-data | + local 'uri=-h recreate-haproxy -uroot -proot_password'
    logger.go:42: 02:53:48 | recreate/5-write-data | + local pod=
    logger.go:42: 02:53:48 | recreate/5-write-data | ++ get_client_pod
    logger.go:42: 02:53:48 | recreate/5-write-data | ++ kubectl -n kuttl-test-able-dragon get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 02:53:49 | recreate/5-write-data | + client_pod=mysql-client
    logger.go:42: 02:53:49 | recreate/5-write-data | + wait_pod mysql-client
    logger.go:42: 02:53:49 | recreate/5-write-data | + local pod=mysql-client
    logger.go:42: 02:53:49 | recreate/5-write-data | + set +o xtrace
    logger.go:42: 02:53:49 | recreate/5-write-data | mysql-clienttrue
    logger.go:42: 02:53:49 | recreate/5-write-data | + kubectl -n kuttl-test-able-dragon exec mysql-client -- bash -c 'printf '\''%s\n'\'' "INSERT myDB.myTable (id) VALUES (100501)" | mysql -sN -h recreate-haproxy -uroot -proot_password'
    logger.go:42: 02:53:49 | recreate/5-write-data | + sed -e 's/mysql: //'
    logger.go:42: 02:53:49 | recreate/5-write-data | + grep -v 'Using a password on the command line interface can be insecure.'
    logger.go:42: 02:53:51 | recreate/5-write-data | + :
    logger.go:42: 02:53:51 | recreate/5-write-data | + for i in 0 1 2
    logger.go:42: 02:53:51 | recreate/5-write-data | +++ get_cluster_name
    logger.go:42: 02:53:51 | recreate/5-write-data | +++ kubectl -n kuttl-test-able-dragon get ps -o 'jsonpath={.items[0].metadata.name}'
    logger.go:42: 02:53:51 | recreate/5-write-data | ++ get_mysql_headless_fqdn recreate 0
    logger.go:42: 02:53:51 | recreate/5-write-data | ++ local cluster=recreate
    logger.go:42: 02:53:51 | recreate/5-write-data | ++ local index=0
    logger.go:42: 02:53:51 | recreate/5-write-data | ++ echo recreate-mysql-0.recreate-mysql
    logger.go:42: 02:53:51 | recreate/5-write-data | + host=recreate-mysql-0.recreate-mysql
    logger.go:42: 02:53:51 | recreate/5-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-0.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:53:51 | recreate/5-write-data | ++ local 'command=SELECT * FROM myDB.myTable'
    logger.go:42: 02:53:51 | recreate/5-write-data | ++ local 'uri=-h recreate-mysql-0.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:53:51 | recreate/5-write-data | ++ local pod=
    logger.go:42: 02:53:51 | recreate/5-write-data | +++ get_client_pod
    logger.go:42: 02:53:51 | recreate/5-write-data | +++ kubectl -n kuttl-test-able-dragon get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 02:53:51 | recreate/5-write-data | ++ client_pod=mysql-client
    logger.go:42: 02:53:51 | recreate/5-write-data | ++ wait_pod mysql-client
    logger.go:42: 02:53:51 | recreate/5-write-data | ++ local pod=mysql-client
    logger.go:42: 02:53:51 | recreate/5-write-data | ++ set +o xtrace
    logger.go:42: 02:53:52 | recreate/5-write-data | mysql-clienttrue
    logger.go:42: 02:53:52 | recreate/5-write-data | ++ kubectl -n kuttl-test-able-dragon exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-0.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:53:52 | recreate/5-write-data | ++ sed -e 's/mysql: //'
    logger.go:42: 02:53:52 | recreate/5-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
    logger.go:42: 02:53:53 | recreate/5-write-data | + data='100500
    logger.go:42: 02:53:53 | recreate/5-write-data | 100501'
    logger.go:42: 02:53:53 | recreate/5-write-data | + kubectl create configmap -n kuttl-test-able-dragon 06-write-data-0 '--from-literal=data=100500
    logger.go:42: 02:53:53 | recreate/5-write-data | 100501'
    logger.go:42: 02:53:54 | recreate/5-write-data | configmap/06-write-data-0 created
    logger.go:42: 02:53:54 | recreate/5-write-data | + for i in 0 1 2
    logger.go:42: 02:53:54 | recreate/5-write-data | +++ get_cluster_name
    logger.go:42: 02:53:54 | recreate/5-write-data | +++ kubectl -n kuttl-test-able-dragon get ps -o 'jsonpath={.items[0].metadata.name}'
    logger.go:42: 02:53:54 | recreate/5-write-data | ++ get_mysql_headless_fqdn recreate 1
    logger.go:42: 02:53:54 | recreate/5-write-data | ++ local cluster=recreate
    logger.go:42: 02:53:54 | recreate/5-write-data | ++ local index=1
    logger.go:42: 02:53:54 | recreate/5-write-data | ++ echo recreate-mysql-1.recreate-mysql
    logger.go:42: 02:53:54 | recreate/5-write-data | + host=recreate-mysql-1.recreate-mysql
    logger.go:42: 02:53:54 | recreate/5-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-1.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:53:54 | recreate/5-write-data | ++ local 'command=SELECT * FROM myDB.myTable'
    logger.go:42: 02:53:54 | recreate/5-write-data | ++ local 'uri=-h recreate-mysql-1.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:53:54 | recreate/5-write-data | ++ local pod=
    logger.go:42: 02:53:54 | recreate/5-write-data | +++ get_client_pod
    logger.go:42: 02:53:54 | recreate/5-write-data | +++ kubectl -n kuttl-test-able-dragon get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 02:53:55 | recreate/5-write-data | ++ client_pod=mysql-client
    logger.go:42: 02:53:55 | recreate/5-write-data | ++ wait_pod mysql-client
    logger.go:42: 02:53:55 | recreate/5-write-data | ++ local pod=mysql-client
    logger.go:42: 02:53:55 | recreate/5-write-data | ++ set +o xtrace
    logger.go:42: 02:53:55 | recreate/5-write-data | mysql-clienttrue
    logger.go:42: 02:53:55 | recreate/5-write-data | ++ kubectl -n kuttl-test-able-dragon exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-1.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:53:55 | recreate/5-write-data | ++ sed -e 's/mysql: //'
    logger.go:42: 02:53:55 | recreate/5-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
    logger.go:42: 02:53:57 | recreate/5-write-data | + data='100500
    logger.go:42: 02:53:57 | recreate/5-write-data | 100501'
    logger.go:42: 02:53:57 | recreate/5-write-data | + kubectl create configmap -n kuttl-test-able-dragon 06-write-data-1 '--from-literal=data=100500
    logger.go:42: 02:53:57 | recreate/5-write-data | 100501'
    logger.go:42: 02:53:57 | recreate/5-write-data | configmap/06-write-data-1 created
    logger.go:42: 02:53:57 | recreate/5-write-data | + for i in 0 1 2
    logger.go:42: 02:53:57 | recreate/5-write-data | +++ get_cluster_name
    logger.go:42: 02:53:57 | recreate/5-write-data | +++ kubectl -n kuttl-test-able-dragon get ps -o 'jsonpath={.items[0].metadata.name}'
    logger.go:42: 02:53:57 | recreate/5-write-data | ++ get_mysql_headless_fqdn recreate 2
    logger.go:42: 02:53:57 | recreate/5-write-data | ++ local cluster=recreate
    logger.go:42: 02:53:57 | recreate/5-write-data | ++ local index=2
    logger.go:42: 02:53:57 | recreate/5-write-data | ++ echo recreate-mysql-2.recreate-mysql
    logger.go:42: 02:53:57 | recreate/5-write-data | + host=recreate-mysql-2.recreate-mysql
    logger.go:42: 02:53:57 | recreate/5-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-2.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:53:57 | recreate/5-write-data | ++ local 'command=SELECT * FROM myDB.myTable'
    logger.go:42: 02:53:57 | recreate/5-write-data | ++ local 'uri=-h recreate-mysql-2.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:53:57 | recreate/5-write-data | ++ local pod=
    logger.go:42: 02:53:57 | recreate/5-write-data | +++ get_client_pod
    logger.go:42: 02:53:57 | recreate/5-write-data | +++ kubectl -n kuttl-test-able-dragon get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 02:53:58 | recreate/5-write-data | ++ client_pod=mysql-client
    logger.go:42: 02:53:58 | recreate/5-write-data | ++ wait_pod mysql-client
    logger.go:42: 02:53:58 | recreate/5-write-data | ++ local pod=mysql-client
    logger.go:42: 02:53:58 | recreate/5-write-data | ++ set +o xtrace
    logger.go:42: 02:53:58 | recreate/5-write-data | mysql-clienttrue
    logger.go:42: 02:53:58 | recreate/5-write-data | ++ kubectl -n kuttl-test-able-dragon exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-2.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:53:58 | recreate/5-write-data | ++ sed -e 's/mysql: //'
    logger.go:42: 02:53:58 | recreate/5-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
    logger.go:42: 02:54:00 | recreate/5-write-data | + data='100500
    logger.go:42: 02:54:00 | recreate/5-write-data | 100501'
    logger.go:42: 02:54:00 | recreate/5-write-data | + kubectl create configmap -n kuttl-test-able-dragon 06-write-data-2 '--from-literal=data=100500
    logger.go:42: 02:54:00 | recreate/5-write-data | 100501'
    logger.go:42: 02:54:00 | recreate/5-write-data | configmap/06-write-data-2 created
    logger.go:42: 02:54:01 | recreate/5-write-data | test step completed 5-write-data
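The trace above also shows how run_mysql works: it resolves the client pod by the name=mysql-client selector, waits for it, execs the statement through a printf | mysql pipe, and filters out the password warning. Reconstructed approximately from the trace (the real definition lives in e2e-tests/functions, alongside wait_pod):

    run_mysql() {
        local command=$1
        local uri=$2
        local client_pod
        # Find the long-lived client pod deployed in step 0.
        client_pod=$(kubectl -n "${NAMESPACE}" get pods \
            --selector=name=mysql-client \
            -o 'jsonpath={.items[].metadata.name}')
        wait_pod "${client_pod}"
        # Run the statement inside the client pod and strip noise.
        kubectl -n "${NAMESPACE}" exec "${client_pod}" -- \
            bash -c "printf '%s\n' \"${command}\" | mysql -sN ${uri}" \
            | sed -e 's/mysql: //' \
            | grep -v 'Using a password on the command line interface can be insecure.'
    }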
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | starting test step 7-delete-cluster
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | running command: [sh -c set -o errexit
        set -o xtrace
        
        source ../../functions
        
        kubectl delete ps -n ${NAMESPACE} recreate]
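Deleting the ps resource lets the operator's finalizers tear the cluster down. In this run the delete command returned at 02:54:02, and kuttl waited about 17 seconds before marking the step complete at 02:54:19, most likely on the step's assert/errors files. A hypothetical way to block on the same condition outside kuttl:

    # Wait until the custom resource is fully gone (illustrative).
    kubectl -n kuttl-test-able-dragon wait --for=delete ps/recreate --timeout=120s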
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | + source ../../functions
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ realpath ../../..
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | ++++ pwd
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/tests/recreate
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | ++ test_name=recreate
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/vars.sh
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | ++++ git rev-parse --abbrev-ref HEAD
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export GIT_BRANCH=PR-874
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ GIT_BRANCH=PR-874
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export VERSION=PR-874-59fcfc82
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ VERSION=PR-874-59fcfc82
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ export CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | ++++ which gdate
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-874/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | ++++ which date
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ date=/usr/bin/date
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ oc get projects
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ :
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ kubectl get nodes
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | +++ grep '^minikube'
    logger.go:42: 02:54:01 | recreate/7-delete-cluster | + kubectl delete ps -n kuttl-test-able-dragon recreate
    logger.go:42: 02:54:02 | recreate/7-delete-cluster | perconaservermysql.ps.percona.com "recreate" deleted
    logger.go:42: 02:54:19 | recreate/7-delete-cluster | test step completed 7-delete-cluster
    logger.go:42: 02:54:19 | recreate/8- | starting test step 8-
    logger.go:42: 02:54:19 | recreate/8- | test step completed 8-
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | starting test step 9-recreate-cluster
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | running command: [sh -c set -o errexit
        set -o xtrace
        
        source ../../functions
        
        get_cr \
            | yq eval '.spec.pause=false' - \
            | yq eval '.spec.mysql.clusterType="async"' - \
            | yq eval '.spec.mysql.size=3' - \
            | yq eval '.spec.proxy.haproxy.enabled=true' - \
            | yq eval '.spec.proxy.haproxy.size=3' - \
            | yq eval '.spec.orchestrator.enabled=true' - \
            | yq eval '.spec.orchestrator.size=3' - \
            | kubectl -n "${NAMESPACE}" apply -f -]
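This step is the same apply pipeline as 4-unpause; only the cluster state it meets differs. Because step 7 deleted the CR, kubectl apply now reports "created" rather than "configured", and the operator rebuilds the cluster from scratch. A hypothetical follow-up to watch it come back (this assumes the CR exposes a .status.state field, which is not shown in this log):

    # Poll the cluster state reported by the operator (assumed field).
    kubectl -n kuttl-test-able-dragon get ps recreate \
        -o 'jsonpath={.status.state}'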
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | + source ../../functions
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ realpath ../../..
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | ++++ pwd
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/tests/recreate
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | ++ test_name=recreate
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/vars.sh
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | ++++ git rev-parse --abbrev-ref HEAD
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export GIT_BRANCH=PR-874
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ GIT_BRANCH=PR-874
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export VERSION=PR-874-59fcfc82
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ VERSION=PR-874-59fcfc82
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ export CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | ++++ which gdate
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-874/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | ++++ which date
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ date=/usr/bin/date
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ oc get projects
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ :
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ kubectl get nodes
    logger.go:42: 02:54:19 | recreate/9-recreate-cluster | +++ grep '^minikube'
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + get_cr
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + local name_suffix=
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval .spec.pause=false -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval .spec.mysql.size=3 -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval .spec.proxy.haproxy.enabled=true -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval '.spec.mysql.clusterType="async"' -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval .spec.proxy.haproxy.size=3 -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval .spec.orchestrator.enabled=true -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval .spec.orchestrator.size=3 -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + kubectl -n kuttl-test-able-dragon apply -f -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + '[' -n '' ']'
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | ++ printf '.metadata.name="%s"' recreate
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval '.metadata.name="recreate"' /mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy/cr.yaml
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval '.spec.secretsName="test-secrets"' -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval '.spec.sslSecretName="test-ssl"' -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval '.spec.upgradeOptions.apply="disabled"' -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval '.spec.mysql.clusterType="async"' -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router"' -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval .spec.orchestrator.enabled=true -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | ++ printf '.spec.initImage="%s"' perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval '.spec.initImage="perconalab/percona-server-mysql-operator:PR-874-59fcfc82"' -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval '.spec.pmm.image="perconalab/pmm-client:3-dev-latest"' -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup"' -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql"' -
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | ++ printf '.spec.proxy.haproxy.image="%s"' perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:54:20 | recreate/9-recreate-cluster | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' -
    logger.go:42: 02:54:21 | recreate/9-recreate-cluster | perconaservermysql.ps.percona.com/recreate created
    logger.go:42: 02:57:46 | recreate/9-recreate-cluster | test step completed 9-recreate-cluster
    logger.go:42: 02:57:46 | recreate/10-write-data | starting test step 10-write-data
    logger.go:42: 02:57:46 | recreate/10-write-data | running command: [sh -c set -o errexit
        set -o xtrace
        
        source ../../functions
        
        run_mysql \
            "INSERT myDB.myTable (id) VALUES (100502)" \
            "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"
        
        for i in 0 1 2; do
            host=$(get_mysql_headless_fqdn $(get_cluster_name) $i)
            data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${host} -uroot -proot_password")
            kubectl create configmap -n "${NAMESPACE}" 11-write-data-${i} --from-literal=data="${data}"
        done]
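Same pattern as 5-write-data, now inserting 100502 and snapshotting into 11-write-data-0..2. The readback that follows returns 100500, 100501 and 100502 on every pod: the rows written before the delete survived the recreate, consistent with the data volumes being retained across CR deletion. An illustrative comparison of the before/after snapshots (not part of the test):

    # Diff the step-5 and step-10 snapshots for pod 0; the only
    # expected difference is the newly inserted 100502 row.
    diff <(kubectl -n kuttl-test-able-dragon get cm 06-write-data-0 -o 'jsonpath={.data.data}') \
         <(kubectl -n kuttl-test-able-dragon get cm 11-write-data-0 -o 'jsonpath={.data.data}')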
    logger.go:42: 02:57:46 | recreate/10-write-data | + source ../../functions
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ realpath ../../..
    logger.go:42: 02:57:46 | recreate/10-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:57:46 | recreate/10-write-data | ++++ pwd
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/tests/recreate
    logger.go:42: 02:57:46 | recreate/10-write-data | ++ test_name=recreate
    logger.go:42: 02:57:46 | recreate/10-write-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/vars.sh
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:57:46 | recreate/10-write-data | ++++ git rev-parse --abbrev-ref HEAD
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export GIT_BRANCH=PR-874
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ GIT_BRANCH=PR-874
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export VERSION=PR-874-59fcfc82
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ VERSION=PR-874-59fcfc82
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ export CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:57:46 | recreate/10-write-data | ++++ which gdate
    logger.go:42: 02:57:46 | recreate/10-write-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-874/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
    logger.go:42: 02:57:46 | recreate/10-write-data | ++++ which date
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ date=/usr/bin/date
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ oc get projects
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ :
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ kubectl get nodes
    logger.go:42: 02:57:46 | recreate/10-write-data | +++ grep '^minikube'
    logger.go:42: 02:57:47 | recreate/10-write-data | +++ get_cluster_name
    logger.go:42: 02:57:47 | recreate/10-write-data | +++ kubectl -n kuttl-test-able-dragon get ps -o 'jsonpath={.items[0].metadata.name}'
    logger.go:42: 02:57:48 | recreate/10-write-data | ++ get_haproxy_svc recreate
    logger.go:42: 02:57:48 | recreate/10-write-data | ++ local cluster=recreate
    logger.go:42: 02:57:48 | recreate/10-write-data | ++ echo recreate-haproxy
    logger.go:42: 02:57:48 | recreate/10-write-data | + run_mysql 'INSERT myDB.myTable (id) VALUES (100502)' '-h recreate-haproxy -uroot -proot_password'
    logger.go:42: 02:57:48 | recreate/10-write-data | + local 'command=INSERT myDB.myTable (id) VALUES (100502)'
    logger.go:42: 02:57:48 | recreate/10-write-data | + local 'uri=-h recreate-haproxy -uroot -proot_password'
    logger.go:42: 02:57:48 | recreate/10-write-data | + local pod=
    logger.go:42: 02:57:48 | recreate/10-write-data | ++ get_client_pod
    logger.go:42: 02:57:48 | recreate/10-write-data | ++ kubectl -n kuttl-test-able-dragon get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 02:57:48 | recreate/10-write-data | + client_pod=mysql-client
    logger.go:42: 02:57:48 | recreate/10-write-data | + wait_pod mysql-client
    logger.go:42: 02:57:48 | recreate/10-write-data | + local pod=mysql-client
    logger.go:42: 02:57:48 | recreate/10-write-data | + set +o xtrace
    logger.go:42: 02:57:49 | recreate/10-write-data | mysql-clienttrue
    logger.go:42: 02:57:49 | recreate/10-write-data | + kubectl -n kuttl-test-able-dragon exec mysql-client -- bash -c 'printf '\''%s\n'\'' "INSERT myDB.myTable (id) VALUES (100502)" | mysql -sN -h recreate-haproxy -uroot -proot_password'
    logger.go:42: 02:57:49 | recreate/10-write-data | + sed -e 's/mysql: //'
    logger.go:42: 02:57:49 | recreate/10-write-data | + grep -v 'Using a password on the command line interface can be insecure.'
    logger.go:42: 02:57:50 | recreate/10-write-data | + :
    logger.go:42: 02:57:50 | recreate/10-write-data | + for i in 0 1 2
    logger.go:42: 02:57:50 | recreate/10-write-data | +++ get_cluster_name
    logger.go:42: 02:57:50 | recreate/10-write-data | +++ kubectl -n kuttl-test-able-dragon get ps -o 'jsonpath={.items[0].metadata.name}'
    logger.go:42: 02:57:50 | recreate/10-write-data | ++ get_mysql_headless_fqdn recreate 0
    logger.go:42: 02:57:50 | recreate/10-write-data | ++ local cluster=recreate
    logger.go:42: 02:57:50 | recreate/10-write-data | ++ local index=0
    logger.go:42: 02:57:50 | recreate/10-write-data | ++ echo recreate-mysql-0.recreate-mysql
    logger.go:42: 02:57:50 | recreate/10-write-data | + host=recreate-mysql-0.recreate-mysql
    logger.go:42: 02:57:50 | recreate/10-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-0.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:57:50 | recreate/10-write-data | ++ local 'command=SELECT * FROM myDB.myTable'
    logger.go:42: 02:57:50 | recreate/10-write-data | ++ local 'uri=-h recreate-mysql-0.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:57:50 | recreate/10-write-data | ++ local pod=
    logger.go:42: 02:57:50 | recreate/10-write-data | +++ get_client_pod
    logger.go:42: 02:57:50 | recreate/10-write-data | +++ kubectl -n kuttl-test-able-dragon get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 02:57:51 | recreate/10-write-data | ++ client_pod=mysql-client
    logger.go:42: 02:57:51 | recreate/10-write-data | ++ wait_pod mysql-client
    logger.go:42: 02:57:51 | recreate/10-write-data | ++ local pod=mysql-client
    logger.go:42: 02:57:51 | recreate/10-write-data | ++ set +o xtrace
    logger.go:42: 02:57:51 | recreate/10-write-data | mysql-clienttrue
    logger.go:42: 02:57:51 | recreate/10-write-data | ++ kubectl -n kuttl-test-able-dragon exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-0.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:57:51 | recreate/10-write-data | ++ sed -e 's/mysql: //'
    logger.go:42: 02:57:51 | recreate/10-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
    logger.go:42: 02:57:53 | recreate/10-write-data | + data='100500
    logger.go:42: 02:57:53 | recreate/10-write-data | 100501
    logger.go:42: 02:57:53 | recreate/10-write-data | 100502'
    logger.go:42: 02:57:53 | recreate/10-write-data | + kubectl create configmap -n kuttl-test-able-dragon 11-write-data-0 '--from-literal=data=100500
    logger.go:42: 02:57:53 | recreate/10-write-data | 100501
    logger.go:42: 02:57:53 | recreate/10-write-data | 100502'
    logger.go:42: 02:57:53 | recreate/10-write-data | configmap/11-write-data-0 created
    logger.go:42: 02:57:53 | recreate/10-write-data | + for i in 0 1 2
    logger.go:42: 02:57:53 | recreate/10-write-data | +++ get_cluster_name
    logger.go:42: 02:57:53 | recreate/10-write-data | +++ kubectl -n kuttl-test-able-dragon get ps -o 'jsonpath={.items[0].metadata.name}'
    logger.go:42: 02:57:54 | recreate/10-write-data | ++ get_mysql_headless_fqdn recreate 1
    logger.go:42: 02:57:54 | recreate/10-write-data | ++ local cluster=recreate
    logger.go:42: 02:57:54 | recreate/10-write-data | ++ local index=1
    logger.go:42: 02:57:54 | recreate/10-write-data | ++ echo recreate-mysql-1.recreate-mysql
    logger.go:42: 02:57:54 | recreate/10-write-data | + host=recreate-mysql-1.recreate-mysql
    logger.go:42: 02:57:54 | recreate/10-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-1.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:57:54 | recreate/10-write-data | ++ local 'command=SELECT * FROM myDB.myTable'
    logger.go:42: 02:57:54 | recreate/10-write-data | ++ local 'uri=-h recreate-mysql-1.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:57:54 | recreate/10-write-data | ++ local pod=
    logger.go:42: 02:57:54 | recreate/10-write-data | +++ get_client_pod
    logger.go:42: 02:57:54 | recreate/10-write-data | +++ kubectl -n kuttl-test-able-dragon get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 02:57:54 | recreate/10-write-data | ++ client_pod=mysql-client
    logger.go:42: 02:57:54 | recreate/10-write-data | ++ wait_pod mysql-client
    logger.go:42: 02:57:54 | recreate/10-write-data | ++ local pod=mysql-client
    logger.go:42: 02:57:54 | recreate/10-write-data | ++ set +o xtrace
    logger.go:42: 02:57:54 | recreate/10-write-data | mysql-clienttrue
    logger.go:42: 02:57:54 | recreate/10-write-data | ++ kubectl -n kuttl-test-able-dragon exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-1.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:57:54 | recreate/10-write-data | ++ sed -e 's/mysql: //'
    logger.go:42: 02:57:54 | recreate/10-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
    logger.go:42: 02:57:56 | recreate/10-write-data | + data='100500
    logger.go:42: 02:57:56 | recreate/10-write-data | 100501
    logger.go:42: 02:57:56 | recreate/10-write-data | 100502'
    logger.go:42: 02:57:56 | recreate/10-write-data | + kubectl create configmap -n kuttl-test-able-dragon 11-write-data-1 '--from-literal=data=100500
    logger.go:42: 02:57:56 | recreate/10-write-data | 100501
    logger.go:42: 02:57:56 | recreate/10-write-data | 100502'
    logger.go:42: 02:57:56 | recreate/10-write-data | configmap/11-write-data-1 created
    logger.go:42: 02:57:56 | recreate/10-write-data | + for i in 0 1 2
    logger.go:42: 02:57:56 | recreate/10-write-data | +++ get_cluster_name
    logger.go:42: 02:57:56 | recreate/10-write-data | +++ kubectl -n kuttl-test-able-dragon get ps -o 'jsonpath={.items[0].metadata.name}'
    logger.go:42: 02:57:57 | recreate/10-write-data | ++ get_mysql_headless_fqdn recreate 2
    logger.go:42: 02:57:57 | recreate/10-write-data | ++ local cluster=recreate
    logger.go:42: 02:57:57 | recreate/10-write-data | ++ local index=2
    logger.go:42: 02:57:57 | recreate/10-write-data | ++ echo recreate-mysql-2.recreate-mysql
    logger.go:42: 02:57:57 | recreate/10-write-data | + host=recreate-mysql-2.recreate-mysql
    logger.go:42: 02:57:57 | recreate/10-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-2.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:57:57 | recreate/10-write-data | ++ local 'command=SELECT * FROM myDB.myTable'
    logger.go:42: 02:57:57 | recreate/10-write-data | ++ local 'uri=-h recreate-mysql-2.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:57:57 | recreate/10-write-data | ++ local pod=
    logger.go:42: 02:57:57 | recreate/10-write-data | +++ get_client_pod
    logger.go:42: 02:57:57 | recreate/10-write-data | +++ kubectl -n kuttl-test-able-dragon get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 02:57:57 | recreate/10-write-data | ++ client_pod=mysql-client
    logger.go:42: 02:57:57 | recreate/10-write-data | ++ wait_pod mysql-client
    logger.go:42: 02:57:57 | recreate/10-write-data | ++ local pod=mysql-client
    logger.go:42: 02:57:57 | recreate/10-write-data | ++ set +o xtrace
    logger.go:42: 02:57:58 | recreate/10-write-data | mysql-clienttrue
    logger.go:42: 02:57:58 | recreate/10-write-data | ++ kubectl -n kuttl-test-able-dragon exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-2.recreate-mysql -uroot -proot_password'
    logger.go:42: 02:57:58 | recreate/10-write-data | ++ sed -e 's/mysql: //'
    logger.go:42: 02:57:58 | recreate/10-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.'
    logger.go:42: 02:57:59 | recreate/10-write-data | + data='100500
    logger.go:42: 02:57:59 | recreate/10-write-data | 100501
    logger.go:42: 02:57:59 | recreate/10-write-data | 100502'
    logger.go:42: 02:57:59 | recreate/10-write-data | + kubectl create configmap -n kuttl-test-able-dragon 11-write-data-2 '--from-literal=data=100500
    logger.go:42: 02:57:59 | recreate/10-write-data | 100501
    logger.go:42: 02:57:59 | recreate/10-write-data | 100502'
    logger.go:42: 02:57:59 | recreate/10-write-data | configmap/11-write-data-2 created
    logger.go:42: 02:58:00 | recreate/10-write-data | test step completed 10-write-data
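
The 10-write-data trace above boils down to one pattern: for each of the three mysql pods, run a SELECT through the shared mysql-client pod and record the result in a per-pod ConfigMap so a later assert step can compare replicas. A minimal standalone sketch of that pattern, using only the namespace, credentials, pod names, and pipeline visible in the log:

    NAMESPACE=kuttl-test-able-dragon
    CLUSTER=$(kubectl -n "$NAMESPACE" get ps -o 'jsonpath={.items[0].metadata.name}')
    for i in 0 1 2; do
        # Headless-service FQDN of each replica, as get_mysql_headless_fqdn builds it.
        host="${CLUSTER}-mysql-${i}.${CLUSTER}-mysql"
        # Same pipeline as the test's run_mysql helper: exec in the client pod,
        # then strip the client's password warning from the output.
        data=$(kubectl -n "$NAMESPACE" exec mysql-client -- bash -c \
            "printf '%s\n' 'SELECT * FROM myDB.myTable' | mysql -sN -h $host -uroot -proot_password" \
            | sed -e 's/mysql: //' \
            | grep -v 'Using a password on the command line interface can be insecure.')
        # Record the rows this replica returned (100500..100502 in this run).
        kubectl create configmap -n "$NAMESPACE" "11-write-data-$i" --from-literal="data=$data"
    done
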
    logger.go:42: 02:58:00 | recreate/98-drop-finalizer | starting test step 98-drop-finalizer
    logger.go:42: 02:58:01 | recreate/98-drop-finalizer | PerconaServerMySQL:kuttl-test-able-dragon/recreate updated
    logger.go:42: 02:58:01 | recreate/98-drop-finalizer | test step completed 98-drop-finalizer
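
The 98-drop-finalizer step is not xtraced, so only its result ("PerconaServerMySQL:kuttl-test-able-dragon/recreate updated") appears in the log. A hypothetical equivalent, assuming the step clears metadata.finalizers on the custom resource so teardown in the next step cannot be blocked (the actual patch in the step file is not shown here):

    # Assumption: "drop finalizer" means emptying the finalizer list on the
    # PerconaServerMySQL resource; merge patch works for custom resources.
    kubectl -n kuttl-test-able-dragon patch ps recreate \
        --type=merge -p '{"metadata":{"finalizers":[]}}'
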
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | starting test step 99-remove-cluster-gracefully
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | running command: [sh -c set -o errexit
        set -o xtrace
        
        source ../../functions
        
        destroy_operator]
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | + source ../../functions
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ realpath ../../..
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | ++++ pwd
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/tests/recreate
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | ++ test_name=recreate
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/vars.sh
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-874
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/deploy
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-874/e2e-tests/conf
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ TEMP_DIR=/tmp/kuttl/ps/recreate
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | ++++ git rev-parse --abbrev-ref HEAD
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export GIT_BRANCH=PR-874
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ GIT_BRANCH=PR-874
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export VERSION=PR-874-59fcfc82
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ VERSION=PR-874-59fcfc82
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-874-59fcfc82
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ PMM_SERVER_VERSION=1.4.0
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ export CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ CERT_MANAGER_VER=1.16.3
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | ++++ which gdate
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-874/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | ++++ which date
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ date=/usr/bin/date
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ oc get projects
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ :
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ kubectl get nodes
    logger.go:42: 02:58:01 | recreate/99-remove-cluster-gracefully | +++ grep '^minikube'
    logger.go:42: 02:58:02 | recreate/99-remove-cluster-gracefully | + destroy_operator
    logger.go:42: 02:58:02 | recreate/99-remove-cluster-gracefully | + kubectl -n ps-operator delete deployment percona-server-mysql-operator --force --grace-period=0
    logger.go:42: 02:58:02 | recreate/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
    logger.go:42: 02:58:02 | recreate/99-remove-cluster-gracefully | deployment.apps "percona-server-mysql-operator" force deleted
    logger.go:42: 02:58:02 | recreate/99-remove-cluster-gracefully | + [[ -n ps-operator ]]
    logger.go:42: 02:58:02 | recreate/99-remove-cluster-gracefully | + kubectl delete namespace ps-operator --force --grace-period=0
    logger.go:42: 02:58:02 | recreate/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
    logger.go:42: 02:58:02 | recreate/99-remove-cluster-gracefully | namespace "ps-operator" force deleted
    logger.go:42: 02:58:08 | recreate/99-remove-cluster-gracefully | test step completed 99-remove-cluster-gracefully
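
As traced above, destroy_operator reduces to two force deletions. A minimal recap, assuming the operator was deployed cluster-wide into the ps-operator namespace as in this job:

    # --force --grace-period=0 skips graceful termination, which is why
    # kubectl prints the "Immediate deletion does not wait..." warning above.
    kubectl -n ps-operator delete deployment percona-server-mysql-operator \
        --force --grace-period=0
    # Guarded in the trace by [[ -n ps-operator ]] before deleting the namespace:
    kubectl delete namespace ps-operator --force --grace-period=0
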
    logger.go:42: 02:58:08 | recreate | recreate events from ns kuttl-test-able-dragon:
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:26 +0000 UTC	Normal	Pod mysql-client	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/mysql-client to gke-jen-ps-874-59fcfc82--default-pool-7f598226-rqxl	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:26 +0000 UTC	Normal	Pod mysql-client.spec.containers{mysql-client}		Pulled	Container image "percona/percona-server:8.0.33" already present on machine	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:26 +0000 UTC	Normal	Pod mysql-client.spec.containers{mysql-client}		Created	Created container: mysql-client	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:26 +0000 UTC	Normal	Pod mysql-client.spec.containers{mysql-client}		Started	Started container mysql-client	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:37 +0000 UTC	Normal	PersistentVolumeClaim datadir-recreate-mysql-0		WaitForFirstConsumer	waiting for first consumer to be created before binding	persistentvolume-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:37 +0000 UTC	Normal	PersistentVolumeClaim datadir-recreate-mysql-0		ExternalProvisioning	Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered.	persistentvolume-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:37 +0000 UTC	Normal	PersistentVolumeClaim datadir-recreate-mysql-0		Provisioning	External provisioner is provisioning volume for claim "kuttl-test-able-dragon/datadir-recreate-mysql-0"	pd.csi.storage.gke.io_gke-d4aa26717dea44808cff-99df-f6a8-vm_5bf2e934-67de-4df6-b27b-4a199f8d6a39	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:37 +0000 UTC	Normal	StatefulSet.apps recreate-mysql		SuccessfulCreate	create Claim datadir-recreate-mysql-0 Pod recreate-mysql-0 in StatefulSet recreate-mysql success	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:37 +0000 UTC	Normal	StatefulSet.apps recreate-mysql		SuccessfulCreate	create Pod recreate-mysql-0 in StatefulSet recreate-mysql successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:38 +0000 UTC	Normal	Pod recreate-orc-0	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-orc-0 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-9k5c	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:38 +0000 UTC	Normal	StatefulSet.apps recreate-orc		SuccessfulCreate	create Pod recreate-orc-0 in StatefulSet recreate-orc successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:39 +0000 UTC	Normal	Pod recreate-orc-0.spec.initContainers{orc-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:39 +0000 UTC	Normal	Pod recreate-orc-0.spec.initContainers{orc-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 215ms (215ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:39 +0000 UTC	Normal	Pod recreate-orc-0.spec.initContainers{orc-init}		Created	Created container: orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:39 +0000 UTC	Normal	Pod recreate-orc-0.spec.initContainers{orc-init}		Started	Started container orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:41 +0000 UTC	Normal	PersistentVolumeClaim datadir-recreate-mysql-0		ProvisioningSucceeded	Successfully provisioned volume pvc-9b4307fe-69c8-41c2-a365-612c496b5b8d	pd.csi.storage.gke.io_gke-d4aa26717dea44808cff-99df-f6a8-vm_5bf2e934-67de-4df6-b27b-4a199f8d6a39	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:41 +0000 UTC	Normal	Pod recreate-mysql-0	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-mysql-0 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-rqxl	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:41 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{orc}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:41 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{orc}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 167ms (167ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:41 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{orc}		Created	Created container: orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:41 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{orc}		Started	Started container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:41 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:41 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 143ms (143ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:41 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:41 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:49 +0000 UTC	Normal	Pod recreate-mysql-0		SuccessfulAttachVolume	AttachVolume.Attach succeeded for volume "pvc-9b4307fe-69c8-41c2-a365-612c496b5b8d" 	attachdetach-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:50 +0000 UTC	Normal	Pod recreate-mysql-0.spec.initContainers{mysql-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:50 +0000 UTC	Normal	Pod recreate-mysql-0.spec.initContainers{mysql-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 252ms (252ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:51 +0000 UTC	Normal	Pod recreate-mysql-0.spec.initContainers{mysql-init}		Created	Created container: mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:51 +0000 UTC	Normal	Pod recreate-mysql-0.spec.initContainers{mysql-init}		Started	Started container mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:53 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{mysql}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:53 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{mysql}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 144ms (144ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:53 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{mysql}		Created	Created container: mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:54 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{mysql}		Started	Started container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:54 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{xtrabackup}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-backup"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:54 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{xtrabackup}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 138ms (138ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:54 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{xtrabackup}		Created	Created container: xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:54 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{xtrabackup}		Started	Started container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:54 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{pt-heartbeat}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:54 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{pt-heartbeat}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 215ms (215ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:54 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{pt-heartbeat}		Created	Created container: pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:45:54 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{pt-heartbeat}		Started	Started container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:13 +0000 UTC	Normal	Pod recreate-orc-1	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-orc-1 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-77v7	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:13 +0000 UTC	Normal	StatefulSet.apps recreate-orc		SuccessfulCreate	create Pod recreate-orc-1 in StatefulSet recreate-orc successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:14 +0000 UTC	Normal	Pod recreate-orc-1.spec.initContainers{orc-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:14 +0000 UTC	Normal	Pod recreate-orc-1.spec.initContainers{orc-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 207ms (207ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:14 +0000 UTC	Normal	Pod recreate-orc-1.spec.initContainers{orc-init}		Created	Created container: orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:14 +0000 UTC	Normal	Pod recreate-orc-1.spec.initContainers{orc-init}		Started	Started container orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:16 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{orc}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:16 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{orc}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 148ms (148ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:16 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{orc}		Created	Created container: orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:16 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{orc}		Started	Started container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:16 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:16 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 137ms (137ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:16 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:16 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:25 +0000 UTC	Normal	PersistentVolumeClaim datadir-recreate-mysql-1		WaitForFirstConsumer	waiting for first consumer to be created before binding	persistentvolume-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:25 +0000 UTC	Normal	PersistentVolumeClaim datadir-recreate-mysql-1		ExternalProvisioning	Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered.	persistentvolume-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:25 +0000 UTC	Normal	PersistentVolumeClaim datadir-recreate-mysql-1		Provisioning	External provisioner is provisioning volume for claim "kuttl-test-able-dragon/datadir-recreate-mysql-1"	pd.csi.storage.gke.io_gke-d4aa26717dea44808cff-99df-f6a8-vm_5bf2e934-67de-4df6-b27b-4a199f8d6a39	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:25 +0000 UTC	Normal	StatefulSet.apps recreate-mysql		SuccessfulCreate	create Claim datadir-recreate-mysql-1 Pod recreate-mysql-1 in StatefulSet recreate-mysql success	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:25 +0000 UTC	Normal	StatefulSet.apps recreate-mysql		SuccessfulCreate	create Pod recreate-mysql-1 in StatefulSet recreate-mysql successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:29 +0000 UTC	Normal	PersistentVolumeClaim datadir-recreate-mysql-1		ProvisioningSucceeded	Successfully provisioned volume pvc-5ea9e19e-6cf5-4a86-9960-c52229c937ab	pd.csi.storage.gke.io_gke-d4aa26717dea44808cff-99df-f6a8-vm_5bf2e934-67de-4df6-b27b-4a199f8d6a39	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:29 +0000 UTC	Normal	Pod recreate-haproxy-0	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-haproxy-0 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-rqxl	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:29 +0000 UTC	Normal	StatefulSet.apps recreate-haproxy		SuccessfulCreate	create Pod recreate-haproxy-0 in StatefulSet recreate-haproxy successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:29 +0000 UTC	Normal	Pod recreate-mysql-1	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-mysql-1 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-9k5c	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:30 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.initContainers{haproxy-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:30 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.initContainers{haproxy-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 256ms (256ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:30 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.initContainers{haproxy-init}		Created	Created container: haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:30 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.initContainers{haproxy-init}		Started	Started container haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:32 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{haproxy}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:32 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{haproxy}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 142ms (142ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:32 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{haproxy}		Created	Created container: haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:33 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{haproxy}		Started	Started container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:33 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:33 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 146ms (146ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:33 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:33 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:33 +0000 UTC	Normal	Pod recreate-haproxy-1	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-haproxy-1 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-9k5c	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:33 +0000 UTC	Normal	StatefulSet.apps recreate-haproxy		SuccessfulCreate	create Pod recreate-haproxy-1 in StatefulSet recreate-haproxy successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:35 +0000 UTC	Warning	Pod recreate-haproxy-1		FailedMount	MountVolume.SetUp failed for volume "config" : failed to sync configmap cache: timed out waiting for the condition	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:35 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.initContainers{haproxy-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:36 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.initContainers{haproxy-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 199ms (199ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:36 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.initContainers{haproxy-init}		Created	Created container: haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:36 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.initContainers{haproxy-init}		Started	Started container haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:37 +0000 UTC	Normal	Pod recreate-mysql-1		SuccessfulAttachVolume	AttachVolume.Attach succeeded for volume "pvc-5ea9e19e-6cf5-4a86-9960-c52229c937ab" 	attachdetach-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:38 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{haproxy}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:38 +0000 UTC	Normal	Pod recreate-mysql-1.spec.initContainers{mysql-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:39 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{haproxy}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 142ms (142ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:39 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{haproxy}		Created	Created container: haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:39 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{haproxy}		Started	Started container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:39 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:39 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 144ms (144ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:39 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:39 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:39 +0000 UTC	Normal	Pod recreate-haproxy-2	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-haproxy-2 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-77v7	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:39 +0000 UTC	Normal	StatefulSet.apps recreate-haproxy		SuccessfulCreate	create Pod recreate-haproxy-2 in StatefulSet recreate-haproxy successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:39 +0000 UTC	Normal	Pod recreate-mysql-1.spec.initContainers{mysql-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 216ms (216ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:39 +0000 UTC	Normal	Pod recreate-mysql-1.spec.initContainers{mysql-init}		Created	Created container: mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:39 +0000 UTC	Normal	Pod recreate-mysql-1.spec.initContainers{mysql-init}		Started	Started container mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:40 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.initContainers{haproxy-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:40 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.initContainers{haproxy-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 209ms (209ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:40 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.initContainers{haproxy-init}		Created	Created container: haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:40 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.initContainers{haproxy-init}		Started	Started container haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:40 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:40 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 130ms (130ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:40 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Created	Created container: mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:40 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Started	Started container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:40 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{xtrabackup}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-backup"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:41 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{xtrabackup}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 148ms (148ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:41 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{xtrabackup}		Created	Created container: xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:41 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{xtrabackup}		Started	Started container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:41 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{pt-heartbeat}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:41 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{pt-heartbeat}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 119ms (119ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:41 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{pt-heartbeat}		Created	Created container: pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:41 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{pt-heartbeat}		Started	Started container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:42 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{haproxy}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:42 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{haproxy}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 144ms (144ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:42 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{haproxy}		Created	Created container: haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:42 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{haproxy}		Started	Started container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:42 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:42 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 136ms (136ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:42 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:42 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:49 +0000 UTC	Normal	Pod recreate-orc-2	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-orc-2 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-rqxl	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:49 +0000 UTC	Normal	StatefulSet.apps recreate-orc		SuccessfulCreate	create Pod recreate-orc-2 in StatefulSet recreate-orc successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:50 +0000 UTC	Normal	Pod recreate-orc-2.spec.initContainers{orc-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:51 +0000 UTC	Normal	Pod recreate-orc-2.spec.initContainers{orc-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 313ms (313ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:51 +0000 UTC	Normal	Pod recreate-orc-2.spec.initContainers{orc-init}		Created	Created container: orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:51 +0000 UTC	Normal	Pod recreate-orc-2.spec.initContainers{orc-init}		Started	Started container orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:52 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{orc}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:52 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{orc}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 128ms (128ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:53 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{orc}		Created	Created container: orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:53 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{orc}		Started	Started container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:53 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:53 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 132ms (132ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:53 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:53 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:59 +0000 UTC	Warning	Pod recreate-mysql-1.spec.containers{mysql}		Unhealthy	Startup probe failed: 2025/03/26 02:46:58 Waiting for MySQL ready state
        2025/03/26 02:46:58 MySQL is ready
        2025/03/26 02:46:58 Peers: [6135386463393266.recreate-mysql-unready.kuttl-test-able-dragon 6637313833643834.recreate-mysql-unready.kuttl-test-able-dragon]
        2025/03/26 02:46:58 FQDN: recreate-mysql-1.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:46:58 Primary: recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon Replicas: [recreate-mysql-1.recreate-mysql.kuttl-test-able-dragon]
        2025/03/26 02:46:58 lookup recreate-mysql-1 [10.56.96.26]
        2025/03/26 02:46:58 PodIP: 10.56.96.26
        2025/03/26 02:46:58 lookup recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon [10.56.98.38]
        2025/03/26 02:46:58 PrimaryIP: 10.56.98.38
        2025/03/26 02:46:58 Donor: recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:46:58 Opening connection to 10.56.96.26
        2025/03/26 02:46:58 Clone required: true
        2025/03/26 02:46:58 Checking if a clone in progress
        2025/03/26 02:46:58 Clone in progress: false
        2025/03/26 02:46:58 Cloning from recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:46:59 Clone finished. Restarting container...
        	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:46:59 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Killing	Container mysql failed startup probe, will be restarted	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:03 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 125ms (125ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:33 +0000 UTC	Normal	PersistentVolumeClaim datadir-recreate-mysql-2		WaitForFirstConsumer	waiting for first consumer to be created before binding	persistentvolume-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:33 +0000 UTC	Normal	PersistentVolumeClaim datadir-recreate-mysql-2		ExternalProvisioning	Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered.	persistentvolume-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:33 +0000 UTC	Normal	PersistentVolumeClaim datadir-recreate-mysql-2		Provisioning	External provisioner is provisioning volume for claim "kuttl-test-able-dragon/datadir-recreate-mysql-2"	pd.csi.storage.gke.io_gke-d4aa26717dea44808cff-99df-f6a8-vm_5bf2e934-67de-4df6-b27b-4a199f8d6a39	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:33 +0000 UTC	Normal	StatefulSet.apps recreate-mysql		SuccessfulCreate	create Claim datadir-recreate-mysql-2 Pod recreate-mysql-2 in StatefulSet recreate-mysql success	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:33 +0000 UTC	Normal	StatefulSet.apps recreate-mysql		SuccessfulCreate	create Pod recreate-mysql-2 in StatefulSet recreate-mysql successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:37 +0000 UTC	Normal	PersistentVolumeClaim datadir-recreate-mysql-2		ProvisioningSucceeded	Successfully provisioned volume pvc-0e868299-c8ad-493f-95dc-b3c9cdfa4829	pd.csi.storage.gke.io_gke-d4aa26717dea44808cff-99df-f6a8-vm_5bf2e934-67de-4df6-b27b-4a199f8d6a39	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:38 +0000 UTC	Normal	Pod recreate-mysql-2	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-mysql-2 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-77v7	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:47 +0000 UTC	Normal	Pod recreate-mysql-2		SuccessfulAttachVolume	AttachVolume.Attach succeeded for volume "pvc-0e868299-c8ad-493f-95dc-b3c9cdfa4829" 	attachdetach-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:48 +0000 UTC	Normal	Pod recreate-mysql-2.spec.initContainers{mysql-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:48 +0000 UTC	Normal	Pod recreate-mysql-2.spec.initContainers{mysql-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 206ms (206ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:48 +0000 UTC	Normal	Pod recreate-mysql-2.spec.initContainers{mysql-init}		Created	Created container: mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:49 +0000 UTC	Normal	Pod recreate-mysql-2.spec.initContainers{mysql-init}		Started	Started container mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 149ms (149ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Created	Created container: mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Started	Started container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{xtrabackup}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-backup"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{xtrabackup}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 126ms (126ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:51 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{xtrabackup}		Created	Created container: xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:51 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{xtrabackup}		Started	Started container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:51 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{pt-heartbeat}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:51 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{pt-heartbeat}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 143ms (143ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:51 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{pt-heartbeat}		Created	Created container: pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:47:51 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{pt-heartbeat}		Started	Started container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:48:09 +0000 UTC	Warning	Pod recreate-mysql-2.spec.containers{mysql}		Unhealthy	Startup probe failed: 2025/03/26 02:48:08 Waiting for MySQL ready state
        2025/03/26 02:48:08 MySQL is ready
        2025/03/26 02:48:08 Peers: [3130626266386566.recreate-mysql-unready.kuttl-test-able-dragon 6135386463393266.recreate-mysql-unready.kuttl-test-able-dragon 6637313833643834.recreate-mysql-unready.kuttl-test-able-dragon]
        2025/03/26 02:48:08 FQDN: recreate-mysql-2.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:48:08 Primary: recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon Replicas: [recreate-mysql-1.recreate-mysql.kuttl-test-able-dragon recreate-mysql-2.recreate-mysql.kuttl-test-able-dragon]
        2025/03/26 02:48:08 lookup recreate-mysql-2 [10.56.97.31]
        2025/03/26 02:48:08 PodIP: 10.56.97.31
        2025/03/26 02:48:08 lookup recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon [10.56.98.38]
        2025/03/26 02:48:08 PrimaryIP: 10.56.98.38
        2025/03/26 02:48:08 Donor: recreate-mysql-1.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:48:08 Opening connection to 10.56.97.31
        2025/03/26 02:48:08 Clone required: true
        2025/03/26 02:48:08 Checking if a clone in progress
        2025/03/26 02:48:08 Clone in progress: false
        2025/03/26 02:48:08 Cloning from recreate-mysql-1.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:48:09 Clone finished. Restarting container...
        	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:48:09 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Killing	Container mysql failed startup probe, will be restarted	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:48:13 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 139ms (139ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:12 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{xtrabackup}		Killing	Stopping container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:12 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Killing	Stopping container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:12 +0000 UTC	Normal	StatefulSet.apps recreate-mysql		SuccessfulDelete	delete Pod recreate-mysql-2 in StatefulSet recreate-mysql successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:13 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{haproxy}		Killing	Stopping container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:13 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:13 +0000 UTC	Normal	StatefulSet.apps recreate-haproxy		SuccessfulDelete	delete Pod recreate-haproxy-2 in StatefulSet recreate-haproxy successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:13 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{pt-heartbeat}		Killing	Stopping container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:13 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{orc}		Killing	Stopping container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:13 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:13 +0000 UTC	Normal	StatefulSet.apps recreate-orc		SuccessfulDelete	delete Pod recreate-orc-2 in StatefulSet recreate-orc successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:14 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{haproxy}		Killing	Stopping container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:14 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:14 +0000 UTC	Normal	StatefulSet.apps recreate-haproxy		SuccessfulDelete	delete Pod recreate-haproxy-1 in StatefulSet recreate-haproxy successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:15 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{haproxy}		Killing	Stopping container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:15 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:15 +0000 UTC	Normal	StatefulSet.apps recreate-haproxy		SuccessfulDelete	delete Pod recreate-haproxy-0 in StatefulSet recreate-haproxy successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:17 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{xtrabackup}		Killing	Stopping container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:17 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Killing	Stopping container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:17 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{pt-heartbeat}		Killing	Stopping container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:17 +0000 UTC	Normal	StatefulSet.apps recreate-mysql		SuccessfulDelete	delete Pod recreate-mysql-1 in StatefulSet recreate-mysql successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:21 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{mysql}		Killing	Stopping container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:21 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{pt-heartbeat}		Killing	Stopping container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:21 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{xtrabackup}		Killing	Stopping container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:21 +0000 UTC	Normal	StatefulSet.apps recreate-mysql		SuccessfulDelete	delete Pod recreate-mysql-0 in StatefulSet recreate-mysql successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:25 +0000 UTC	Warning	Pod recreate-mysql-0.spec.containers{mysql}		Unhealthy	Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:43 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:43 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{orc}		Killing	Stopping container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:49:43 +0000 UTC	Normal	StatefulSet.apps recreate-orc		SuccessfulDelete	delete Pod recreate-orc-1 in StatefulSet recreate-orc successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:14 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{orc}		Killing	Stopping container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:14 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:14 +0000 UTC	Normal	StatefulSet.apps recreate-orc		SuccessfulDelete	delete Pod recreate-orc-0 in StatefulSet recreate-orc successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:48 +0000 UTC	Normal	Pod recreate-mysql-0	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-mysql-0 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-rqxl	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:48 +0000 UTC	Normal	Pod recreate-orc-0	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-orc-0 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-9k5c	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:49 +0000 UTC	Normal	Pod recreate-orc-0.spec.initContainers{orc-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:49 +0000 UTC	Normal	Pod recreate-orc-0.spec.initContainers{orc-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 222ms (222ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:49 +0000 UTC	Normal	Pod recreate-orc-0.spec.initContainers{orc-init}		Created	Created container: orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:49 +0000 UTC	Normal	Pod recreate-orc-0.spec.initContainers{orc-init}		Started	Started container orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:51 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{orc}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:51 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{orc}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 140ms (140ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:51 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{orc}		Created	Created container: orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:51 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{orc}		Started	Started container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:51 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:51 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 148ms (148ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:51 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:52 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:55 +0000 UTC	Normal	Pod recreate-mysql-0		SuccessfulAttachVolume	AttachVolume.Attach succeeded for volume "pvc-9b4307fe-69c8-41c2-a365-612c496b5b8d" 	attachdetach-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:57 +0000 UTC	Normal	Pod recreate-mysql-0.spec.initContainers{mysql-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:57 +0000 UTC	Normal	Pod recreate-mysql-0.spec.initContainers{mysql-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 200ms (200ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:57 +0000 UTC	Normal	Pod recreate-mysql-0.spec.initContainers{mysql-init}		Created	Created container: mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:57 +0000 UTC	Normal	Pod recreate-mysql-0.spec.initContainers{mysql-init}		Started	Started container mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:59 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{mysql}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:59 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{mysql}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 129ms (129ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:50:59 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{mysql}		Created	Created container: mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:00 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{mysql}		Started	Started container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:00 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{xtrabackup}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-backup"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:00 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{xtrabackup}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 119ms (119ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:00 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{xtrabackup}		Created	Created container: xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:00 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{xtrabackup}		Started	Started container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:00 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{pt-heartbeat}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:00 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{pt-heartbeat}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 137ms (137ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:00 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{pt-heartbeat}		Created	Created container: pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:00 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{pt-heartbeat}		Started	Started container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:24 +0000 UTC	Normal	Pod recreate-orc-1	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-orc-1 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-77v7	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:25 +0000 UTC	Normal	Pod recreate-orc-1.spec.initContainers{orc-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:25 +0000 UTC	Normal	Pod recreate-orc-1.spec.initContainers{orc-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 191ms (191ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:25 +0000 UTC	Normal	Pod recreate-orc-1.spec.initContainers{orc-init}		Created	Created container: orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:25 +0000 UTC	Normal	Pod recreate-orc-1.spec.initContainers{orc-init}		Started	Started container orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:28 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{orc}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:28 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{orc}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 119ms (120ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:28 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{orc}		Created	Created container: orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:28 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{orc}		Started	Started container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:28 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:28 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 126ms (126ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:28 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:28 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:32 +0000 UTC	Normal	Pod recreate-mysql-1	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-mysql-1 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-9k5c	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:35 +0000 UTC	Normal	Pod recreate-haproxy-0	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-haproxy-0 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-rqxl	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:35 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.initContainers{haproxy-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:37 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.initContainers{haproxy-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 1.178s (1.178s including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:37 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.initContainers{haproxy-init}		Created	Created container: haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:37 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.initContainers{haproxy-init}		Started	Started container haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:38 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{haproxy}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:39 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{haproxy}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 139ms (139ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:39 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{haproxy}		Created	Created container: haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:39 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{haproxy}		Started	Started container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:39 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:39 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 118ms (118ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:39 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:39 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:39 +0000 UTC	Normal	Pod recreate-mysql-1		SuccessfulAttachVolume	AttachVolume.Attach succeeded for volume "pvc-5ea9e19e-6cf5-4a86-9960-c52229c937ab" 	attachdetach-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:40 +0000 UTC	Normal	Pod recreate-haproxy-1	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-haproxy-1 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-9k5c	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:40 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.initContainers{haproxy-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:40 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.initContainers{haproxy-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 231ms (231ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:40 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.initContainers{haproxy-init}		Created	Created container: haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:40 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.initContainers{haproxy-init}		Started	Started container haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:42 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{haproxy}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:42 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{haproxy}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 165ms (165ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:42 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{haproxy}		Created	Created container: haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:43 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{haproxy}		Started	Started container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:43 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:43 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 141ms (141ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:43 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:43 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:43 +0000 UTC	Normal	Pod recreate-haproxy-2	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-haproxy-2 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-77v7	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:43 +0000 UTC	Normal	Pod recreate-mysql-1.spec.initContainers{mysql-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:43 +0000 UTC	Normal	Pod recreate-mysql-1.spec.initContainers{mysql-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 218ms (219ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:43 +0000 UTC	Normal	Pod recreate-mysql-1.spec.initContainers{mysql-init}		Created	Created container: mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:43 +0000 UTC	Normal	Pod recreate-mysql-1.spec.initContainers{mysql-init}		Started	Started container mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:44 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.initContainers{haproxy-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:44 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.initContainers{haproxy-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 172ms (172ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:44 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.initContainers{haproxy-init}		Created	Created container: haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:44 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.initContainers{haproxy-init}		Started	Started container haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:45 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:45 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 154ms (154ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:45 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Created	Created container: mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{haproxy}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{haproxy}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 131ms (131ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{haproxy}		Created	Created container: haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{haproxy}		Started	Started container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 118ms (118ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Started	Started container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{xtrabackup}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-backup"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{xtrabackup}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 138ms (138ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{xtrabackup}		Created	Created container: xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{xtrabackup}		Started	Started container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{pt-heartbeat}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{pt-heartbeat}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 140ms (141ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{pt-heartbeat}		Created	Created container: pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:51:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{pt-heartbeat}		Started	Started container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:00 +0000 UTC	Normal	Pod recreate-orc-2	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-orc-2 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-rqxl	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:01 +0000 UTC	Normal	Pod recreate-orc-2.spec.initContainers{orc-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:01 +0000 UTC	Normal	Pod recreate-orc-2.spec.initContainers{orc-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 209ms (209ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:01 +0000 UTC	Normal	Pod recreate-orc-2.spec.initContainers{orc-init}		Created	Created container: orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:01 +0000 UTC	Normal	Pod recreate-orc-2.spec.initContainers{orc-init}		Started	Started container orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:03 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{orc}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:03 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{orc}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 126ms (126ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:03 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{orc}		Created	Created container: orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:03 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{orc}		Started	Started container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:03 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:03 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 144ms (144ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:03 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:03 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:04 +0000 UTC	Warning	Pod recreate-mysql-1.spec.containers{mysql}		Unhealthy	Startup probe failed: 2025/03/26 02:52:03 Waiting for MySQL ready state
        2025/03/26 02:52:03 MySQL is ready
        2025/03/26 02:52:03 Peers: [3764383039363362.recreate-mysql-unready.kuttl-test-able-dragon 6335353736383262.recreate-mysql-unready.kuttl-test-able-dragon]
        2025/03/26 02:52:03 FQDN: recreate-mysql-1.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:52:03 Primary: recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon Replicas: [recreate-mysql-1.recreate-mysql.kuttl-test-able-dragon]
        2025/03/26 02:52:03 lookup recreate-mysql-1 [10.56.96.29]
        2025/03/26 02:52:03 PodIP: 10.56.96.29
        2025/03/26 02:52:03 lookup recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon [10.56.98.41]
        2025/03/26 02:52:03 PrimaryIP: 10.56.98.41
        2025/03/26 02:52:03 Donor: recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:52:03 Opening connection to 10.56.96.29
        2025/03/26 02:52:03 Clone required: true
        2025/03/26 02:52:03 Checking if a clone in progress
        2025/03/26 02:52:03 Clone in progress: false
        2025/03/26 02:52:03 Cloning from recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:52:04 Clone finished. Restarting container...
        	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:04 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Killing	Container mysql failed startup probe, will be restarted	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:08 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 177ms (177ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:38 +0000 UTC	Normal	Pod recreate-mysql-2	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-mysql-2 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-77v7	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:45 +0000 UTC	Normal	Pod recreate-mysql-2		SuccessfulAttachVolume	AttachVolume.Attach succeeded for volume "pvc-0e868299-c8ad-493f-95dc-b3c9cdfa4829" 	attachdetach-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:47 +0000 UTC	Normal	Pod recreate-mysql-2.spec.initContainers{mysql-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:47 +0000 UTC	Normal	Pod recreate-mysql-2.spec.initContainers{mysql-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 195ms (195ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:47 +0000 UTC	Normal	Pod recreate-mysql-2.spec.initContainers{mysql-init}		Created	Created container: mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:47 +0000 UTC	Normal	Pod recreate-mysql-2.spec.initContainers{mysql-init}		Started	Started container mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:49 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:49 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 125ms (125ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:49 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Created	Created container: mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:49 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Started	Started container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:49 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{xtrabackup}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-backup"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{xtrabackup}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 136ms (137ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{xtrabackup}		Created	Created container: xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{xtrabackup}		Started	Started container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{pt-heartbeat}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{pt-heartbeat}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 112ms (112ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{pt-heartbeat}		Created	Created container: pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:52:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{pt-heartbeat}		Started	Started container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:53:08 +0000 UTC	Warning	Pod recreate-mysql-2.spec.containers{mysql}		Unhealthy	Startup probe failed: 2025/03/26 02:53:07 Waiting for MySQL ready state
        2025/03/26 02:53:07 MySQL is ready
        2025/03/26 02:53:07 Peers: [3133393134613461.recreate-mysql-unready.kuttl-test-able-dragon 3764383039363362.recreate-mysql-unready.kuttl-test-able-dragon 6335353736383262.recreate-mysql-unready.kuttl-test-able-dragon]
        2025/03/26 02:53:07 FQDN: recreate-mysql-2.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:53:07 Primary: recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon Replicas: [recreate-mysql-1.recreate-mysql.kuttl-test-able-dragon recreate-mysql-2.recreate-mysql.kuttl-test-able-dragon]
        2025/03/26 02:53:07 lookup recreate-mysql-2 [10.56.97.34]
        2025/03/26 02:53:07 PodIP: 10.56.97.34
        2025/03/26 02:53:07 lookup recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon [10.56.98.41]
        2025/03/26 02:53:07 PrimaryIP: 10.56.98.41
        2025/03/26 02:53:07 Donor: recreate-mysql-1.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:53:07 Opening connection to 10.56.97.34
        2025/03/26 02:53:07 Clone required: true
        2025/03/26 02:53:07 Checking if a clone in progress
        2025/03/26 02:53:07 Clone in progress: false
        2025/03/26 02:53:07 Cloning from recreate-mysql-1.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:53:08 Clone finished. Restarting container...
        	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:53:08 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Killing	Container mysql failed startup probe, will be restarted	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:53:11 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 134ms (134ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:02 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{xtrabackup}		Killing	Stopping container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:02 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Killing	Stopping container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:02 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{pt-heartbeat}		Killing	Stopping container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:07 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{xtrabackup}		Killing	Stopping container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:07 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Killing	Stopping container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:07 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{pt-heartbeat}		Killing	Stopping container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:17 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{haproxy}		Killing	Stopping container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:17 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:17 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{haproxy}		Killing	Stopping container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:17 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:17 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{haproxy}		Killing	Stopping container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:17 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:17 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{mysql}		Killing	Stopping container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:17 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{pt-heartbeat}		Killing	Stopping container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:17 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{xtrabackup}		Killing	Stopping container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:17 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{orc}		Killing	Stopping container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:17 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:18 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{orc}		Killing	Stopping container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:18 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:18 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{orc}		Killing	Stopping container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:18 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:21 +0000 UTC	Warning	Pod recreate-mysql-0.spec.containers{mysql}		Unhealthy	Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:42 +0000 UTC	Normal	Pod recreate-mysql-0	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-mysql-0 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-9k5c	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:42 +0000 UTC	Warning	Pod recreate-mysql-0		FailedAttachVolume	Multi-Attach error for volume "pvc-9b4307fe-69c8-41c2-a365-612c496b5b8d" Volume is already exclusively attached to one node and can't be attached to another	attachdetach-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:42 +0000 UTC	Normal	StatefulSet.apps recreate-mysql		SuccessfulCreate	create Pod recreate-mysql-0 in StatefulSet recreate-mysql successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:58 +0000 UTC	Normal	Pod recreate-mysql-0		SuccessfulAttachVolume	AttachVolume.Attach succeeded for volume "pvc-9b4307fe-69c8-41c2-a365-612c496b5b8d" 	attachdetach-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:59 +0000 UTC	Normal	Pod recreate-mysql-0.spec.initContainers{mysql-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:59 +0000 UTC	Normal	Pod recreate-mysql-0.spec.initContainers{mysql-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 192ms (192ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:59 +0000 UTC	Normal	Pod recreate-mysql-0.spec.initContainers{mysql-init}		Created	Created container: mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:54:59 +0000 UTC	Normal	Pod recreate-mysql-0.spec.initContainers{mysql-init}		Started	Started container mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:01 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{mysql}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:01 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{mysql}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 121ms (121ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:01 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{mysql}		Created	Created container: mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:01 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{mysql}		Started	Started container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:01 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{xtrabackup}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-backup"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:01 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{xtrabackup}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 131ms (131ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:01 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{xtrabackup}		Created	Created container: xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:01 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{xtrabackup}		Started	Started container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:01 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{pt-heartbeat}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:02 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{pt-heartbeat}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 143ms (143ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:02 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{pt-heartbeat}		Created	Created container: pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:02 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{pt-heartbeat}		Started	Started container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:03 +0000 UTC	Normal	Pod recreate-orc-0	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-orc-0 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-rqxl	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:03 +0000 UTC	Normal	Pod recreate-orc-0.spec.initContainers{orc-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:03 +0000 UTC	Normal	StatefulSet.apps recreate-orc		SuccessfulCreate	create Pod recreate-orc-0 in StatefulSet recreate-orc successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:04 +0000 UTC	Normal	Pod recreate-orc-0.spec.initContainers{orc-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 197ms (197ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:04 +0000 UTC	Normal	Pod recreate-orc-0.spec.initContainers{orc-init}		Created	Created container: orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:04 +0000 UTC	Normal	Pod recreate-orc-0.spec.initContainers{orc-init}		Started	Started container orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:05 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{orc}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:05 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{orc}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 153ms (153ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:05 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{orc}		Created	Created container: orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:05 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{orc}		Started	Started container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:06 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:06 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 136ms (136ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:06 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:06 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:34 +0000 UTC	Normal	Pod recreate-mysql-1	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-mysql-1 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-rqxl	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:34 +0000 UTC	Normal	StatefulSet.apps recreate-mysql		SuccessfulCreate	create Pod recreate-mysql-1 in StatefulSet recreate-mysql successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:38 +0000 UTC	Normal	Pod recreate-haproxy-0	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-haproxy-0 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-9k5c	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:38 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.initContainers{haproxy-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:38 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.initContainers{haproxy-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 200ms (201ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:38 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.initContainers{haproxy-init}		Created	Created container: haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:38 +0000 UTC	Normal	StatefulSet.apps recreate-haproxy		SuccessfulCreate	create Pod recreate-haproxy-0 in StatefulSet recreate-haproxy successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:39 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.initContainers{haproxy-init}		Started	Started container haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:39 +0000 UTC	Normal	Pod recreate-orc-1	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-orc-1 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-77v7	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:39 +0000 UTC	Normal	Pod recreate-orc-1.spec.initContainers{orc-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:39 +0000 UTC	Normal	StatefulSet.apps recreate-orc		SuccessfulCreate	create Pod recreate-orc-1 in StatefulSet recreate-orc successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:40 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{haproxy}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:40 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{haproxy}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 117ms (117ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:40 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{haproxy}		Created	Created container: haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:40 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{haproxy}		Started	Started container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:40 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:40 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 116ms (116ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:40 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:40 +0000 UTC	Normal	Pod recreate-orc-1.spec.initContainers{orc-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 224ms (224ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:40 +0000 UTC	Normal	Pod recreate-orc-1.spec.initContainers{orc-init}		Created	Created container: orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:40 +0000 UTC	Normal	Pod recreate-orc-1.spec.initContainers{orc-init}		Started	Started container orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:41 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:41 +0000 UTC	Normal	Pod recreate-haproxy-1	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-haproxy-1 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-rqxl	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:41 +0000 UTC	Normal	StatefulSet.apps recreate-haproxy		SuccessfulCreate	create Pod recreate-haproxy-1 in StatefulSet recreate-haproxy successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:42 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.initContainers{haproxy-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:42 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.initContainers{haproxy-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 189ms (189ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:42 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.initContainers{haproxy-init}		Created	Created container: haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:42 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.initContainers{haproxy-init}		Started	Started container haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:42 +0000 UTC	Normal	Pod recreate-mysql-1		SuccessfulAttachVolume	AttachVolume.Attach succeeded for volume "pvc-5ea9e19e-6cf5-4a86-9960-c52229c937ab" 	attachdetach-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:42 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{orc}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:42 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{orc}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 128ms (128ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:42 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{orc}		Created	Created container: orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:42 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{orc}		Started	Started container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:42 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:42 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 134ms (134ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:42 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:42 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:44 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{haproxy}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:44 +0000 UTC	Normal	Pod recreate-mysql-1.spec.initContainers{mysql-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:44 +0000 UTC	Normal	Pod recreate-mysql-1.spec.initContainers{mysql-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 172ms (172ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:44 +0000 UTC	Normal	Pod recreate-mysql-1.spec.initContainers{mysql-init}		Created	Created container: mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:44 +0000 UTC	Normal	Pod recreate-mysql-1.spec.initContainers{mysql-init}		Started	Started container mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:45 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{haproxy}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 890ms (890ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:45 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{haproxy}		Created	Created container: haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:45 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{haproxy}		Started	Started container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:45 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:45 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:46 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 175ms (175ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:46 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:46 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:46 +0000 UTC	Normal	Pod recreate-haproxy-2	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-haproxy-2 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-77v7	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:46 +0000 UTC	Normal	StatefulSet.apps recreate-haproxy		SuccessfulCreate	create Pod recreate-haproxy-2 in StatefulSet recreate-haproxy successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 166ms (166ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Created	Created container: mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Started	Started container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{xtrabackup}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-backup"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{xtrabackup}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 167ms (167ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{xtrabackup}		Created	Created container: xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{xtrabackup}		Started	Started container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{pt-heartbeat}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:46 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{pt-heartbeat}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 423ms (423ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:47 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.initContainers{haproxy-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:47 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.initContainers{haproxy-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 198ms (198ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:47 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.initContainers{haproxy-init}		Created	Created container: haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:47 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.initContainers{haproxy-init}		Started	Started container haproxy-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:47 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{pt-heartbeat}		Created	Created container: pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:47 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{pt-heartbeat}		Started	Started container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:49 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{haproxy}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:49 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{haproxy}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 126ms (126ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:49 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{haproxy}		Created	Created container: haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:49 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{haproxy}		Started	Started container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:49 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:49 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 340ms (340ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:49 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:55:50 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:05 +0000 UTC	Warning	Pod recreate-mysql-1.spec.containers{mysql}		Unhealthy	Startup probe failed: 2025/03/26 02:56:03 Waiting for MySQL ready state
        2025/03/26 02:56:03 MySQL is ready
        2025/03/26 02:56:03 Peers: [3830613738326163.recreate-mysql-unready.kuttl-test-able-dragon 3931643933663864.recreate-mysql-unready.kuttl-test-able-dragon]
        2025/03/26 02:56:03 FQDN: recreate-mysql-1.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:56:03 Primary: recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon Replicas: [recreate-mysql-1.recreate-mysql.kuttl-test-able-dragon]
        2025/03/26 02:56:03 lookup recreate-mysql-1 [10.56.98.46]
        2025/03/26 02:56:03 PodIP: 10.56.98.46
        2025/03/26 02:56:03 lookup recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon [10.56.96.30]
        2025/03/26 02:56:03 PrimaryIP: 10.56.96.30
        2025/03/26 02:56:03 Donor: recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:56:03 Opening connection to 10.56.98.46
        2025/03/26 02:56:03 Clone required: true
        2025/03/26 02:56:03 Checking if a clone in progress
        2025/03/26 02:56:03 Clone in progress: false
        2025/03/26 02:56:03 Cloning from recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:56:05 Clone finished. Restarting container...
        	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:05 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Killing	Container mysql failed startup probe, will be restarted	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:08 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 113ms (114ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:14 +0000 UTC	Normal	Pod recreate-orc-2	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-orc-2 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-9k5c	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:14 +0000 UTC	Normal	StatefulSet.apps recreate-orc		SuccessfulCreate	create Pod recreate-orc-2 in StatefulSet recreate-orc successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:15 +0000 UTC	Normal	Pod recreate-orc-2.spec.initContainers{orc-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:15 +0000 UTC	Normal	Pod recreate-orc-2.spec.initContainers{orc-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 179ms (179ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:15 +0000 UTC	Normal	Pod recreate-orc-2.spec.initContainers{orc-init}		Created	Created container: orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:15 +0000 UTC	Normal	Pod recreate-orc-2.spec.initContainers{orc-init}		Started	Started container orc-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:17 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{orc}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:17 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{orc}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 150ms (150ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:17 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{orc}		Created	Created container: orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:17 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{orc}		Started	Started container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:17 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{mysql-monit}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:18 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{mysql-monit}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 152ms (152ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:18 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{mysql-monit}		Created	Created container: mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:18 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{mysql-monit}		Started	Started container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:39 +0000 UTC	Normal	Pod recreate-mysql-2	Binding	Scheduled	Successfully assigned kuttl-test-able-dragon/recreate-mysql-2 to gke-jen-ps-874-59fcfc82--default-pool-7f598226-77v7	default-scheduler	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:39 +0000 UTC	Normal	StatefulSet.apps recreate-mysql		SuccessfulCreate	create Pod recreate-mysql-2 in StatefulSet recreate-mysql successful	statefulset-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:47 +0000 UTC	Normal	Pod recreate-mysql-2		SuccessfulAttachVolume	AttachVolume.Attach succeeded for volume "pvc-0e868299-c8ad-493f-95dc-b3c9cdfa4829" 	attachdetach-controller	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:48 +0000 UTC	Normal	Pod recreate-mysql-2.spec.initContainers{mysql-init}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:48 +0000 UTC	Normal	Pod recreate-mysql-2.spec.initContainers{mysql-init}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:PR-874-59fcfc82" in 191ms (191ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:48 +0000 UTC	Normal	Pod recreate-mysql-2.spec.initContainers{mysql-init}		Created	Created container: mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:48 +0000 UTC	Normal	Pod recreate-mysql-2.spec.initContainers{mysql-init}		Started	Started container mysql-init	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 129ms (129ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Created	Created container: mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Started	Started container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{xtrabackup}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-backup"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{xtrabackup}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 133ms (133ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:50 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{xtrabackup}		Created	Created container: xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:51 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{xtrabackup}		Started	Started container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:51 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{pt-heartbeat}		Pulling	Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:51 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{pt-heartbeat}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 128ms (128ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:51 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{pt-heartbeat}		Created	Created container: pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:56:51 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{pt-heartbeat}		Started	Started container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:57:09 +0000 UTC	Warning	Pod recreate-mysql-2.spec.containers{mysql}		Unhealthy	Startup probe failed: 2025/03/26 02:57:08 Waiting for MySQL ready state
        2025/03/26 02:57:08 MySQL is ready
        2025/03/26 02:57:08 Peers: [3630313131316131.recreate-mysql-unready.kuttl-test-able-dragon 3830613738326163.recreate-mysql-unready.kuttl-test-able-dragon 3931643933663864.recreate-mysql-unready.kuttl-test-able-dragon]
        2025/03/26 02:57:08 FQDN: recreate-mysql-2.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:57:08 Primary: recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon Replicas: [recreate-mysql-1.recreate-mysql.kuttl-test-able-dragon recreate-mysql-2.recreate-mysql.kuttl-test-able-dragon]
        2025/03/26 02:57:08 lookup recreate-mysql-2 [10.56.97.37]
        2025/03/26 02:57:08 PodIP: 10.56.97.37
        2025/03/26 02:57:08 lookup recreate-mysql-0.recreate-mysql.kuttl-test-able-dragon [10.56.96.30]
        2025/03/26 02:57:08 PrimaryIP: 10.56.96.30
        2025/03/26 02:57:08 Donor: recreate-mysql-1.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:57:08 Opening connection to 10.56.97.37
        2025/03/26 02:57:08 Clone required: true
        2025/03/26 02:57:08 Checking if a clone in progress
        2025/03/26 02:57:08 Clone in progress: false
        2025/03/26 02:57:08 Cloning from recreate-mysql-1.recreate-mysql.kuttl-test-able-dragon
        2025/03/26 02:57:09 Clone finished. Restarting container...
        	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:57:09 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Killing	Container mysql failed startup probe, will be restarted	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:57:13 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Pulled	Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 118ms (118ms including waiting)	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:01 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{orc}		Killing	Stopping container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:01 +0000 UTC	Normal	Pod recreate-orc-0.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:01 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{orc}		Killing	Stopping container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:01 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{orc}		Killing	Stopping container orc	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:01 +0000 UTC	Normal	Pod recreate-orc-2.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:02 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{haproxy}		Killing	Stopping container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:02 +0000 UTC	Normal	Pod recreate-haproxy-0.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:02 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{haproxy}		Killing	Stopping container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:02 +0000 UTC	Normal	Pod recreate-haproxy-1.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:02 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{haproxy}		Killing	Stopping container haproxy	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:02 +0000 UTC	Normal	Pod recreate-haproxy-2.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:02 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{xtrabackup}		Killing	Stopping container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:02 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{mysql}		Killing	Stopping container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:02 +0000 UTC	Normal	Pod recreate-mysql-1.spec.containers{pt-heartbeat}		Killing	Stopping container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:02 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{xtrabackup}		Killing	Stopping container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:02 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{mysql}		Killing	Stopping container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:02 +0000 UTC	Normal	Pod recreate-mysql-2.spec.containers{pt-heartbeat}		Killing	Stopping container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:02 +0000 UTC	Normal	Pod recreate-orc-1.spec.containers{mysql-monit}		Killing	Stopping container mysql-monit	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:03 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{mysql}		Killing	Stopping container mysql	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:03 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{pt-heartbeat}		Killing	Stopping container pt-heartbeat	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:03 +0000 UTC	Normal	Pod recreate-mysql-0.spec.containers{xtrabackup}		Killing	Stopping container xtrabackup	kubelet	
    logger.go:42: 02:58:08 | recreate | 2025-03-26 02:58:04 +0000 UTC	Warning	Pod recreate-mysql-0.spec.containers{mysql}		Unhealthy	Readiness probe failed: 2025/03/26 02:58:04 MySQL state is not ready...
        	kubelet	
    logger.go:42: 02:58:08 | recreate | Deleting namespace: kuttl-test-able-dragon
=== NAME  kuttl
    harness.go:407: run tests finished
    harness.go:515: cleaning up
    harness.go:572: removing temp folder: ""
--- PASS: kuttl (815.10s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/recreate (814.67s)
PASS