=== RUN kuttl harness.go:464: starting setup harness.go:255: running tests using configured kubeconfig. harness.go:278: Successful connection to cluster at: https://34.29.203.205 harness.go:363: running tests harness.go:75: going to run test suite with timeout of 180 seconds for each step harness.go:375: testsuite: e2e-tests/tests has 34 tests === RUN kuttl/harness === RUN kuttl/harness/recreate === PAUSE kuttl/harness/recreate === CONT kuttl/harness/recreate logger.go:42: 17:39:14 | recreate | Creating namespace: kuttl-test-ethical-krill logger.go:42: 17:39:14 | recreate/0-deploy-operator | starting test step 0-deploy-operator logger.go:42: 17:39:14 | recreate/0-deploy-operator | running command: [sh -c set -o errexit set -o xtrace source ../../functions init_temp_dir # do this only in the first TestStep deploy_operator deploy_non_tls_cluster_secrets deploy_tls_cluster_secrets deploy_client] logger.go:42: 17:39:14 | recreate/0-deploy-operator | + source ../../functions logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ realpath ../../.. logger.go:42: 17:39:14 | recreate/0-deploy-operator | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:39:14 | recreate/0-deploy-operator | ++++ pwd logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/tests/recreate logger.go:42: 17:39:14 | recreate/0-deploy-operator | ++ test_name=recreate logger.go:42: 17:39:14 | recreate/0-deploy-operator | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/vars.sh logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:39:14 | recreate/0-deploy-operator | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export GIT_BRANCH=PR-825 logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ GIT_BRANCH=PR-825 logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export VERSION=PR-825-808887c6 logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ VERSION=PR-825-808887c6 logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export 
IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ export CERT_MANAGER_VER=1.16.3 logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ CERT_MANAGER_VER=1.16.3 logger.go:42: 17:39:14 | recreate/0-deploy-operator | ++++ which gdate logger.go:42: 17:39:14 | recreate/0-deploy-operator | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-825/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 17:39:14 | recreate/0-deploy-operator | ++++ which date logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ date=/usr/bin/date logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ oc get projects logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ : logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ kubectl get nodes logger.go:42: 17:39:14 | recreate/0-deploy-operator | +++ grep '^minikube' logger.go:42: 17:39:15 | recreate/0-deploy-operator | + init_temp_dir logger.go:42: 17:39:15 | recreate/0-deploy-operator | + rm -rf /tmp/kuttl/ps/recreate logger.go:42: 17:39:15 | recreate/0-deploy-operator | + mkdir -p /tmp/kuttl/ps/recreate logger.go:42: 17:39:15 | recreate/0-deploy-operator | + deploy_operator logger.go:42: 17:39:15 | recreate/0-deploy-operator | + destroy_operator logger.go:42: 17:39:15 | recreate/0-deploy-operator | + kubectl -n ps-operator delete deployment percona-server-mysql-operator --force --grace-period=0 logger.go:42: 17:39:15 | 
recreate/0-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 17:39:15 | recreate/0-deploy-operator | Error from server (NotFound): deployments.apps "percona-server-mysql-operator" not found logger.go:42: 17:39:15 | recreate/0-deploy-operator | + true logger.go:42: 17:39:15 | recreate/0-deploy-operator | + [[ -n ps-operator ]] logger.go:42: 17:39:15 | recreate/0-deploy-operator | + kubectl delete namespace ps-operator --force --grace-period=0 logger.go:42: 17:39:15 | recreate/0-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 17:39:16 | recreate/0-deploy-operator | Error from server (NotFound): namespaces "ps-operator" not found logger.go:42: 17:39:16 | recreate/0-deploy-operator | + true logger.go:42: 17:39:16 | recreate/0-deploy-operator | + [[ -n ps-operator ]] logger.go:42: 17:39:16 | recreate/0-deploy-operator | + create_namespace ps-operator logger.go:42: 17:39:16 | recreate/0-deploy-operator | + local namespace=ps-operator logger.go:42: 17:39:16 | recreate/0-deploy-operator | + [[ -n '' ]] logger.go:42: 17:39:16 | recreate/0-deploy-operator | + kubectl delete namespace ps-operator --ignore-not-found logger.go:42: 17:39:16 | recreate/0-deploy-operator | + kubectl wait --for=delete namespace ps-operator logger.go:42: 17:39:16 | recreate/0-deploy-operator | + kubectl create namespace ps-operator logger.go:42: 17:39:17 | recreate/0-deploy-operator | namespace/ps-operator created logger.go:42: 17:39:17 | recreate/0-deploy-operator | + kubectl -n ps-operator apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy/crd.yaml logger.go:42: 17:39:18 | recreate/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlbackups.ps.percona.com serverside-applied logger.go:42: 17:39:18 | recreate/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlrestores.ps.percona.com serverside-applied logger.go:42: 17:39:19 | recreate/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqls.ps.percona.com serverside-applied logger.go:42: 17:39:19 | recreate/0-deploy-operator | + '[' -n ps-operator ']' logger.go:42: 17:39:19 | recreate/0-deploy-operator | + kubectl -n ps-operator apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy/cw-rbac.yaml logger.go:42: 17:39:20 | recreate/0-deploy-operator | serviceaccount/percona-server-mysql-operator created logger.go:42: 17:39:20 | recreate/0-deploy-operator | role.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created logger.go:42: 17:39:20 | recreate/0-deploy-operator | clusterrole.rbac.authorization.k8s.io/percona-server-mysql-operator unchanged logger.go:42: 17:39:21 | recreate/0-deploy-operator | rolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created logger.go:42: 17:39:21 | recreate/0-deploy-operator | clusterrolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator unchanged logger.go:42: 17:39:21 | recreate/0-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="DISABLE_TELEMETRY").value) = "true"' logger.go:42: 17:39:21 | recreate/0-deploy-operator | + yq eval 
'(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="LOG_LEVEL").value) = "DEBUG"' logger.go:42: 17:39:21 | recreate/0-deploy-operator | + kubectl -n ps-operator apply -f - logger.go:42: 17:39:21 | recreate/0-deploy-operator | ++ printf 'select(documentIndex==1).spec.template.spec.containers[0].image="%s"' perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:39:21 | recreate/0-deploy-operator | + yq eval 'select(documentIndex==1).spec.template.spec.containers[0].image="perconalab/percona-server-mysql-operator:PR-825-808887c6"' /mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy/cw-operator.yaml logger.go:42: 17:39:22 | recreate/0-deploy-operator | configmap/percona-server-mysql-operator-config created logger.go:42: 17:39:23 | recreate/0-deploy-operator | deployment.apps/percona-server-mysql-operator created logger.go:42: 17:39:23 | recreate/0-deploy-operator | + deploy_non_tls_cluster_secrets logger.go:42: 17:39:23 | recreate/0-deploy-operator | + kubectl -n kuttl-test-ethical-krill apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf/secrets.yaml logger.go:42: 17:39:23 | recreate/0-deploy-operator | secret/test-secrets created logger.go:42: 17:39:23 | recreate/0-deploy-operator | + deploy_tls_cluster_secrets logger.go:42: 17:39:23 | recreate/0-deploy-operator | + kubectl -n kuttl-test-ethical-krill apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf/ssl-secret.yaml logger.go:42: 17:39:24 | recreate/0-deploy-operator | secret/test-ssl created logger.go:42: 17:39:24 | recreate/0-deploy-operator | + deploy_client logger.go:42: 17:39:24 | recreate/0-deploy-operator | + kubectl -n kuttl-test-ethical-krill apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf/client.yaml logger.go:42: 17:39:25 | recreate/0-deploy-operator | pod/mysql-client created logger.go:42: 17:39:26 | recreate/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 17:39:26 | recreate/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 17:39:26 | recreate/0-deploy-operator | ASSERT FAIL Resource(s) not found. logger.go:42: 17:39:27 | recreate/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 17:39:27 | recreate/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 17:39:28 | recreate/0-deploy-operator | ASSERT FAIL Resource(s) not found. logger.go:42: 17:39:29 | recreate/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 17:39:29 | recreate/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 17:39:29 | recreate/0-deploy-operator | ASSERT FAIL Resource(s) not found. 
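The deploy_operator flow above applies the CRDs server-side, applies the cluster-wide RBAC, and then rewrites deploy/cw-operator.yaml in-stream with a chain of yq eval expressions (manager image, DISABLE_TELEMETRY, LOG_LEVEL) before piping the result to kubectl. A condensed sketch of that pattern, assuming the same two-document cw-operator.yaml layout (document index 1 holds the Deployment) and using the image tag from this run:

    # CRDs and cluster-wide RBAC go in first; server-side apply avoids the client-side
    # last-applied-configuration annotation, which very large CRDs can exceed
    kubectl -n ps-operator apply --server-side --force-conflicts -f deploy/crd.yaml
    kubectl -n ps-operator apply -f deploy/cw-rbac.yaml

    # patch the manager container in-stream, then apply the resulting Deployment
    yq eval 'select(documentIndex==1).spec.template.spec.containers[0].image="perconalab/percona-server-mysql-operator:PR-825-808887c6"' deploy/cw-operator.yaml \
      | yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="DISABLE_TELEMETRY").value) = "true"' - \
      | yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="LOG_LEVEL").value) = "DEBUG"' - \
      | kubectl -n ps-operator apply -f -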
logger.go:42: 17:39:31 | recreate/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 17:39:31 | recreate/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 17:39:31 | recreate/0-deploy-operator | ASSERT FAIL Resource(s) not found. logger.go:42: 17:39:32 | recreate/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 17:39:32 | recreate/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 17:39:33 | recreate/0-deploy-operator | ASSERT FAIL Resource(s) not found. logger.go:42: 17:39:34 | recreate/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 17:39:34 | recreate/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 17:39:34 | recreate/0-deploy-operator | INFO Found 1 resource(s). logger.go:42: 17:39:34 | recreate/0-deploy-operator | NAME NAMESPACE COL0 logger.go:42: 17:39:34 | recreate/0-deploy-operator | percona-server-mysql-operator ps-operator 1 logger.go:42: 17:39:34 | recreate/0-deploy-operator | ASSERT PASS logger.go:42: 17:39:34 | recreate/0-deploy-operator | test step completed 0-deploy-operator logger.go:42: 17:39:34 | recreate/1-create-cluster | starting test step 1-create-cluster logger.go:42: 17:39:34 | recreate/1-create-cluster | running command: [sh -c set -o errexit set -o xtrace source ../../functions get_cr \ | yq eval '.spec.mysql.clusterType="async"' - \ | yq eval '.spec.mysql.size=3' - \ | yq eval '.spec.proxy.haproxy.enabled=true' - \ | yq eval '.spec.proxy.haproxy.size=3' - \ | yq eval '.spec.orchestrator.enabled=true' - \ | yq eval '.spec.orchestrator.size=3' - \ | kubectl -n "${NAMESPACE}" apply -f -] logger.go:42: 17:39:34 | recreate/1-create-cluster | + source ../../functions logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ realpath ../../.. 
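The readiness check above is a retry loop around the kubectl-assert krew plugin, polling until the operator Deployment reports status.readyReplicas=1 (it passes on the sixth attempt here). With plain kubectl the same wait can be expressed directly; a rough equivalent sketch (the 180s timeout is an assumption, not taken from the test):

    # block until the operator Deployment rollout finishes
    kubectl -n ps-operator rollout status deployment/percona-server-mysql-operator --timeout=180s

    # or, condition-based
    kubectl -n ps-operator wait deployment/percona-server-mysql-operator --for=condition=Available --timeout=180s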
logger.go:42: 17:39:34 | recreate/1-create-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:39:34 | recreate/1-create-cluster | ++++ pwd logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/tests/recreate logger.go:42: 17:39:34 | recreate/1-create-cluster | ++ test_name=recreate logger.go:42: 17:39:34 | recreate/1-create-cluster | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/vars.sh logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:39:34 | recreate/1-create-cluster | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export GIT_BRANCH=PR-825 logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ GIT_BRANCH=PR-825 logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export VERSION=PR-825-808887c6 logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ VERSION=PR-825-808887c6 logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ export CERT_MANAGER_VER=1.16.3 logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ CERT_MANAGER_VER=1.16.3 logger.go:42: 17:39:34 | recreate/1-create-cluster | ++++ which gdate logger.go:42: 17:39:34 | recreate/1-create-cluster | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-825/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 17:39:34 | recreate/1-create-cluster | ++++ which date logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ date=/usr/bin/date logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ oc get projects logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ : logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ kubectl get nodes logger.go:42: 17:39:34 | recreate/1-create-cluster | +++ grep '^minikube' logger.go:42: 17:39:35 | recreate/1-create-cluster | + get_cr logger.go:42: 17:39:35 | recreate/1-create-cluster | + local name_suffix= logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval '.spec.mysql.clusterType="async"' - logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval .spec.proxy.haproxy.enabled=true - logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval .spec.mysql.size=3 - logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval .spec.orchestrator.enabled=true - logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval .spec.proxy.haproxy.size=3 - logger.go:42: 17:39:35 | recreate/1-create-cluster | ++ printf '.spec.initImage="%s"' perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval .spec.orchestrator.size=3 - logger.go:42: 17:39:35 | recreate/1-create-cluster | + kubectl -n kuttl-test-ethical-krill apply -f - logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval '.spec.initImage="perconalab/percona-server-mysql-operator:PR-825-808887c6"' - logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval '.spec.secretsName="test-secrets"' - logger.go:42: 17:39:35 | recreate/1-create-cluster | + '[' -n '' ']' logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval '.spec.sslSecretName="test-ssl"' - logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval - logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval '.spec.mysql.clusterType="async"' - logger.go:42: 17:39:35 | recreate/1-create-cluster | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:3-dev-latest 
logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval '.spec.pmm.image="perconalab/pmm-client:3-dev-latest"' - logger.go:42: 17:39:35 | recreate/1-create-cluster | ++ printf '.spec.proxy.haproxy.image="%s"' perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' - logger.go:42: 17:39:35 | recreate/1-create-cluster | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql"' - logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval '.spec.upgradeOptions.apply="disabled"' - logger.go:42: 17:39:35 | recreate/1-create-cluster | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router"' - logger.go:42: 17:39:35 | recreate/1-create-cluster | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' - logger.go:42: 17:39:35 | recreate/1-create-cluster | ++ printf '.metadata.name="%s"' recreate logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval '.metadata.name="recreate"' /mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy/cr.yaml logger.go:42: 17:39:35 | recreate/1-create-cluster | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' - logger.go:42: 17:39:35 | recreate/1-create-cluster | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:39:35 | recreate/1-create-cluster | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup"' - logger.go:42: 17:39:36 | recreate/1-create-cluster | perconaservermysql.ps.percona.com/recreate created logger.go:42: 17:45:47 | recreate/1-create-cluster | test step completed 1-create-cluster logger.go:42: 17:45:47 | recreate/2-write-data | starting test step 2-write-data logger.go:42: 17:45:47 | recreate/2-write-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" run_mysql \ "INSERT myDB.myTable (id) VALUES (100500)" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" for i in 0 1 2; do host=$(get_mysql_headless_fqdn $(get_cluster_name) $i) data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${host} -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 02-write-data-${i} --from-literal=data="${data}" done] logger.go:42: 17:45:47 | recreate/2-write-data | + source ../../functions logger.go:42: 17:45:47 | recreate/2-write-data | +++ realpath ../../.. 
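Step 1-create-cluster renders the custom resource by running deploy/cr.yaml through get_cr's chain of yq eval edits (name, images, secretsName, sslSecretName) and then layering the step's own overrides on top: an async cluster with three mysql, three haproxy, and three orchestrator members. Collapsed into a single pipeline with just the step-level overrides, where ${NAMESPACE} is the kuttl test namespace (kuttl-test-ethical-krill in this run):

    # render the CR and apply an async 3/3/3 topology
    yq eval '.metadata.name="recreate"' deploy/cr.yaml \
      | yq eval '.spec.secretsName="test-secrets"' - \
      | yq eval '.spec.sslSecretName="test-ssl"' - \
      | yq eval '.spec.mysql.clusterType="async"' - \
      | yq eval '.spec.mysql.size=3' - \
      | yq eval '.spec.proxy.haproxy.enabled=true' - \
      | yq eval '.spec.proxy.haproxy.size=3' - \
      | yq eval '.spec.orchestrator.enabled=true' - \
      | yq eval '.spec.orchestrator.size=3' - \
      | kubectl -n "${NAMESPACE}" apply -f -

The step then waits roughly six minutes (17:39:36 to 17:45:47) for the cluster to reach the state expected by its assert files.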
logger.go:42: 17:45:47 | recreate/2-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:45:47 | recreate/2-write-data | ++++ pwd logger.go:42: 17:45:47 | recreate/2-write-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/tests/recreate logger.go:42: 17:45:47 | recreate/2-write-data | ++ test_name=recreate logger.go:42: 17:45:47 | recreate/2-write-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/vars.sh logger.go:42: 17:45:47 | recreate/2-write-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:45:47 | recreate/2-write-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:45:47 | recreate/2-write-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:45:47 | recreate/2-write-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:45:47 | recreate/2-write-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:45:47 | recreate/2-write-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:45:47 | recreate/2-write-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:45:47 | recreate/2-write-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:45:47 | recreate/2-write-data | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:45:47 | recreate/2-write-data | +++ TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:45:47 | recreate/2-write-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:45:47 | recreate/2-write-data | +++ export GIT_BRANCH=PR-825 logger.go:42: 17:45:47 | recreate/2-write-data | +++ GIT_BRANCH=PR-825 logger.go:42: 17:45:47 | recreate/2-write-data | +++ export VERSION=PR-825-808887c6 logger.go:42: 17:45:47 | recreate/2-write-data | +++ VERSION=PR-825-808887c6 logger.go:42: 17:45:47 | recreate/2-write-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:45:47 | recreate/2-write-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:45:47 | recreate/2-write-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:45:47 | recreate/2-write-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:45:47 | recreate/2-write-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:45:47 | recreate/2-write-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:45:47 | recreate/2-write-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:45:47 | recreate/2-write-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:45:47 | recreate/2-write-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:45:47 | recreate/2-write-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:45:47 | recreate/2-write-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:45:47 | recreate/2-write-data | +++ 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:45:47 | recreate/2-write-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:45:47 | recreate/2-write-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:45:47 | recreate/2-write-data | +++ export PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:45:47 | recreate/2-write-data | +++ PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:45:47 | recreate/2-write-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:45:47 | recreate/2-write-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:45:47 | recreate/2-write-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:45:47 | recreate/2-write-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:45:47 | recreate/2-write-data | +++ export CERT_MANAGER_VER=1.16.3 logger.go:42: 17:45:47 | recreate/2-write-data | +++ CERT_MANAGER_VER=1.16.3 logger.go:42: 17:45:47 | recreate/2-write-data | ++++ which gdate logger.go:42: 17:45:47 | recreate/2-write-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-825/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 17:45:47 | recreate/2-write-data | ++++ which date logger.go:42: 17:45:47 | recreate/2-write-data | +++ date=/usr/bin/date logger.go:42: 17:45:47 | recreate/2-write-data | +++ oc get projects logger.go:42: 17:45:47 | recreate/2-write-data | +++ : logger.go:42: 17:45:47 | recreate/2-write-data | +++ kubectl get nodes logger.go:42: 17:45:47 | recreate/2-write-data | +++ grep '^minikube' logger.go:42: 17:45:49 | recreate/2-write-data | +++ get_cluster_name logger.go:42: 17:45:49 | recreate/2-write-data | +++ kubectl -n kuttl-test-ethical-krill get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 17:45:49 | recreate/2-write-data | ++ get_haproxy_svc recreate logger.go:42: 17:45:49 | recreate/2-write-data | ++ local cluster=recreate logger.go:42: 17:45:49 | recreate/2-write-data | ++ echo recreate-haproxy logger.go:42: 17:45:49 | recreate/2-write-data | + run_mysql 'CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)' '-h recreate-haproxy -uroot -proot_password' logger.go:42: 17:45:49 | recreate/2-write-data | + local 'command=CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)' logger.go:42: 17:45:49 | recreate/2-write-data | + local 'uri=-h recreate-haproxy -uroot -proot_password' logger.go:42: 17:45:49 | recreate/2-write-data | + local pod= logger.go:42: 17:45:49 | recreate/2-write-data | ++ get_client_pod logger.go:42: 17:45:49 | recreate/2-write-data | ++ kubectl -n kuttl-test-ethical-krill get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:45:49 | recreate/2-write-data | + client_pod=mysql-client logger.go:42: 17:45:49 | recreate/2-write-data | + wait_pod mysql-client logger.go:42: 17:45:49 | recreate/2-write-data | + local pod=mysql-client logger.go:42: 17:45:49 | recreate/2-write-data | + set +o xtrace logger.go:42: 17:45:50 | recreate/2-write-data | mysql-clienttrue logger.go:42: 17:45:50 | recreate/2-write-data | + kubectl -n kuttl-test-ethical-krill exec mysql-client -- bash -c 'printf '\''%s\n'\'' "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" | mysql -sN -h recreate-haproxy -uroot 
-proot_password' logger.go:42: 17:45:50 | recreate/2-write-data | + sed -e 's/mysql: //' logger.go:42: 17:45:50 | recreate/2-write-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 17:45:52 | recreate/2-write-data | + : logger.go:42: 17:45:52 | recreate/2-write-data | +++ get_cluster_name logger.go:42: 17:45:52 | recreate/2-write-data | +++ kubectl -n kuttl-test-ethical-krill get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 17:45:52 | recreate/2-write-data | ++ get_haproxy_svc recreate logger.go:42: 17:45:52 | recreate/2-write-data | ++ local cluster=recreate logger.go:42: 17:45:52 | recreate/2-write-data | ++ echo recreate-haproxy logger.go:42: 17:45:52 | recreate/2-write-data | + run_mysql 'INSERT myDB.myTable (id) VALUES (100500)' '-h recreate-haproxy -uroot -proot_password' logger.go:42: 17:45:52 | recreate/2-write-data | + local 'command=INSERT myDB.myTable (id) VALUES (100500)' logger.go:42: 17:45:52 | recreate/2-write-data | + local 'uri=-h recreate-haproxy -uroot -proot_password' logger.go:42: 17:45:52 | recreate/2-write-data | + local pod= logger.go:42: 17:45:52 | recreate/2-write-data | ++ get_client_pod logger.go:42: 17:45:52 | recreate/2-write-data | ++ kubectl -n kuttl-test-ethical-krill get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:45:53 | recreate/2-write-data | + client_pod=mysql-client logger.go:42: 17:45:53 | recreate/2-write-data | + wait_pod mysql-client logger.go:42: 17:45:53 | recreate/2-write-data | + local pod=mysql-client logger.go:42: 17:45:53 | recreate/2-write-data | + set +o xtrace logger.go:42: 17:45:53 | recreate/2-write-data | mysql-clienttrue logger.go:42: 17:45:53 | recreate/2-write-data | + kubectl -n kuttl-test-ethical-krill exec mysql-client -- bash -c 'printf '\''%s\n'\'' "INSERT myDB.myTable (id) VALUES (100500)" | mysql -sN -h recreate-haproxy -uroot -proot_password' logger.go:42: 17:45:53 | recreate/2-write-data | + sed -e 's/mysql: //' logger.go:42: 17:45:53 | recreate/2-write-data | + grep -v 'Using a password on the command line interface can be insecure.' 
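run_mysql above is a small wrapper: it looks up the long-lived mysql-client pod, pipes the statement into mysql over kubectl exec, and strips the password warning from the output. A stripped-down sketch of the same mechanism (run_sql, its name and two-argument form, is made up for this sketch; the host, credentials, and namespace are the ones used in this run):

    # run one SQL statement through the client pod
    run_sql() {
      local sql="$1" host="$2"
      kubectl -n kuttl-test-ethical-krill exec mysql-client -- \
        bash -c "printf '%s\n' \"${sql}\" | mysql -sN -h ${host} -uroot -proot_password" \
        | sed -e 's/mysql: //' \
        | grep -v 'Using a password on the command line interface can be insecure.'
    }

    run_sql "INSERT myDB.myTable (id) VALUES (100500)" recreate-haproxy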
logger.go:42: 17:45:55 | recreate/2-write-data | + : logger.go:42: 17:45:55 | recreate/2-write-data | + for i in 0 1 2 logger.go:42: 17:45:55 | recreate/2-write-data | +++ get_cluster_name logger.go:42: 17:45:55 | recreate/2-write-data | +++ kubectl -n kuttl-test-ethical-krill get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 17:45:55 | recreate/2-write-data | ++ get_mysql_headless_fqdn recreate 0 logger.go:42: 17:45:55 | recreate/2-write-data | ++ local cluster=recreate logger.go:42: 17:45:55 | recreate/2-write-data | ++ local index=0 logger.go:42: 17:45:55 | recreate/2-write-data | ++ echo recreate-mysql-0.recreate-mysql logger.go:42: 17:45:55 | recreate/2-write-data | + host=recreate-mysql-0.recreate-mysql logger.go:42: 17:45:55 | recreate/2-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-0.recreate-mysql -uroot -proot_password' logger.go:42: 17:45:55 | recreate/2-write-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 17:45:55 | recreate/2-write-data | ++ local 'uri=-h recreate-mysql-0.recreate-mysql -uroot -proot_password' logger.go:42: 17:45:55 | recreate/2-write-data | ++ local pod= logger.go:42: 17:45:55 | recreate/2-write-data | +++ get_client_pod logger.go:42: 17:45:55 | recreate/2-write-data | +++ kubectl -n kuttl-test-ethical-krill get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:45:56 | recreate/2-write-data | ++ client_pod=mysql-client logger.go:42: 17:45:56 | recreate/2-write-data | ++ wait_pod mysql-client logger.go:42: 17:45:56 | recreate/2-write-data | ++ local pod=mysql-client logger.go:42: 17:45:56 | recreate/2-write-data | ++ set +o xtrace logger.go:42: 17:45:56 | recreate/2-write-data | mysql-clienttrue logger.go:42: 17:45:56 | recreate/2-write-data | ++ kubectl -n kuttl-test-ethical-krill exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-0.recreate-mysql -uroot -proot_password' logger.go:42: 17:45:56 | recreate/2-write-data | ++ sed -e 's/mysql: //' logger.go:42: 17:45:56 | recreate/2-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 17:45:57 | recreate/2-write-data | + data=100500 logger.go:42: 17:45:57 | recreate/2-write-data | + kubectl create configmap -n kuttl-test-ethical-krill 02-write-data-0 --from-literal=data=100500 logger.go:42: 17:45:58 | recreate/2-write-data | configmap/02-write-data-0 created logger.go:42: 17:45:58 | recreate/2-write-data | + for i in 0 1 2 logger.go:42: 17:45:58 | recreate/2-write-data | +++ get_cluster_name logger.go:42: 17:45:58 | recreate/2-write-data | +++ kubectl -n kuttl-test-ethical-krill get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 17:45:58 | recreate/2-write-data | ++ get_mysql_headless_fqdn recreate 1 logger.go:42: 17:45:58 | recreate/2-write-data | ++ local cluster=recreate logger.go:42: 17:45:58 | recreate/2-write-data | ++ local index=1 logger.go:42: 17:45:58 | recreate/2-write-data | ++ echo recreate-mysql-1.recreate-mysql logger.go:42: 17:45:58 | recreate/2-write-data | + host=recreate-mysql-1.recreate-mysql logger.go:42: 17:45:58 | recreate/2-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-1.recreate-mysql -uroot -proot_password' logger.go:42: 17:45:58 | recreate/2-write-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 17:45:58 | recreate/2-write-data | ++ local 'uri=-h recreate-mysql-1.recreate-mysql -uroot -proot_password' logger.go:42: 17:45:58 | recreate/2-write-data | ++ local pod= logger.go:42: 17:45:58 | recreate/2-write-data | +++ get_client_pod logger.go:42: 17:45:58 | recreate/2-write-data | +++ kubectl -n kuttl-test-ethical-krill get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:45:59 | recreate/2-write-data | ++ client_pod=mysql-client logger.go:42: 17:45:59 | recreate/2-write-data | ++ wait_pod mysql-client logger.go:42: 17:45:59 | recreate/2-write-data | ++ local pod=mysql-client logger.go:42: 17:45:59 | recreate/2-write-data | ++ set +o xtrace logger.go:42: 17:45:59 | recreate/2-write-data | mysql-clienttrue logger.go:42: 17:45:59 | recreate/2-write-data | ++ kubectl -n kuttl-test-ethical-krill exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-1.recreate-mysql -uroot -proot_password' logger.go:42: 17:45:59 | recreate/2-write-data | ++ sed -e 's/mysql: //' logger.go:42: 17:45:59 | recreate/2-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
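The surrounding loop reads the table back from each mysql pod individually through the headless-service FQDN recreate-mysql-N.recreate-mysql and freezes the result into a 02-write-data-N configmap, so later steps have a per-replica snapshot to compare against. Condensed, reusing the hypothetical run_sql helper sketched earlier:

    # capture what every replica sees, one configmap per ordinal
    for i in 0 1 2; do
      host="recreate-mysql-${i}.recreate-mysql"
      data=$(run_sql "SELECT * FROM myDB.myTable" "${host}")
      kubectl create configmap -n kuttl-test-ethical-krill "02-write-data-${i}" --from-literal=data="${data}"
    done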
logger.go:42: 17:46:00 | recreate/2-write-data | + data=100500 logger.go:42: 17:46:00 | recreate/2-write-data | + kubectl create configmap -n kuttl-test-ethical-krill 02-write-data-1 --from-literal=data=100500 logger.go:42: 17:46:01 | recreate/2-write-data | configmap/02-write-data-1 created logger.go:42: 17:46:01 | recreate/2-write-data | + for i in 0 1 2 logger.go:42: 17:46:01 | recreate/2-write-data | +++ get_cluster_name logger.go:42: 17:46:01 | recreate/2-write-data | +++ kubectl -n kuttl-test-ethical-krill get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 17:46:01 | recreate/2-write-data | ++ get_mysql_headless_fqdn recreate 2 logger.go:42: 17:46:01 | recreate/2-write-data | ++ local cluster=recreate logger.go:42: 17:46:01 | recreate/2-write-data | ++ local index=2 logger.go:42: 17:46:01 | recreate/2-write-data | ++ echo recreate-mysql-2.recreate-mysql logger.go:42: 17:46:01 | recreate/2-write-data | + host=recreate-mysql-2.recreate-mysql logger.go:42: 17:46:01 | recreate/2-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-2.recreate-mysql -uroot -proot_password' logger.go:42: 17:46:01 | recreate/2-write-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 17:46:01 | recreate/2-write-data | ++ local 'uri=-h recreate-mysql-2.recreate-mysql -uroot -proot_password' logger.go:42: 17:46:01 | recreate/2-write-data | ++ local pod= logger.go:42: 17:46:01 | recreate/2-write-data | +++ get_client_pod logger.go:42: 17:46:01 | recreate/2-write-data | +++ kubectl -n kuttl-test-ethical-krill get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:46:02 | recreate/2-write-data | ++ client_pod=mysql-client logger.go:42: 17:46:02 | recreate/2-write-data | ++ wait_pod mysql-client logger.go:42: 17:46:02 | recreate/2-write-data | ++ local pod=mysql-client logger.go:42: 17:46:02 | recreate/2-write-data | ++ set +o xtrace logger.go:42: 17:46:02 | recreate/2-write-data | mysql-clienttrue logger.go:42: 17:46:02 | recreate/2-write-data | ++ kubectl -n kuttl-test-ethical-krill exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-2.recreate-mysql -uroot -proot_password' logger.go:42: 17:46:02 | recreate/2-write-data | ++ sed -e 's/mysql: //' logger.go:42: 17:46:02 | recreate/2-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 17:46:04 | recreate/2-write-data | + data=100500 logger.go:42: 17:46:04 | recreate/2-write-data | + kubectl create configmap -n kuttl-test-ethical-krill 02-write-data-2 --from-literal=data=100500 logger.go:42: 17:46:04 | recreate/2-write-data | configmap/02-write-data-2 created [controller-runtime] log.SetLogger(...) was never called; logs will not be displayed. 
Detected at: > goroutine 27 [running]: > runtime/debug.Stack() > /nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/runtime/debug/stack.go:24 +0x5e > sigs.k8s.io/controller-runtime/pkg/log.eventuallyFulfillRoot() > /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/log/log.go:60 +0xcd > sigs.k8s.io/controller-runtime/pkg/log.(*delegatingLogSink).WithName(0xc0002f3c00, {0x184a055, 0x14}) > /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/log/deleg.go:147 +0x3e > github.com/go-logr/logr.Logger.WithName({{0x1acb7d8, 0xc0002f3c00}, 0x0}, {0x184a055?, 0xc0006b5f80?}) > /home/mowsiany/go/pkg/mod/github.com/go-logr/logr@v1.2.4/logr.go:336 +0x36 > sigs.k8s.io/controller-runtime/pkg/client.newClient(0x131ead3?, {0x0, 0xc000462b60, {0x1accd90, 0xc0004f8300}, 0x0, {0x0, 0x0}, 0x0}) > /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/client/client.go:122 +0xf1 > sigs.k8s.io/controller-runtime/pkg/client.New(0xc0000aed88?, {0x0, 0xc000462b60, {0x1accd90, 0xc0004f8300}, 0x0, {0x0, 0x0}, 0x0}) > /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/client/client.go:103 +0x7d > github.com/kudobuilder/kuttl/pkg/test/utils.NewRetryClient(0xc0000aed88, {0x0, 0xc000462b60, {0x1accd90, 0xc0004f8300}, 0x0, {0x0, 0x0}, 0x0}) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/utils/kubernetes.go:177 +0x127 > github.com/kudobuilder/kuttl/pkg/test.(*Harness).Client(0xc000373208, 0x81?) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/harness.go:323 +0x18e > github.com/kudobuilder/kuttl/pkg/test.(*Step).Create(0xc0006216c0, 0xc0001331e0, {0xc000520a08, 0x18}) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/step.go:177 +0x63 > github.com/kudobuilder/kuttl/pkg/test.(*Step).Run(0xc0006216c0, 0xc0001331e0, {0xc000520a08, 0x18}) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/step.go:457 +0x24a > github.com/kudobuilder/kuttl/pkg/test.(*Case).Run(0xc000282640, 0xc0001331e0, 0xc00061ab40) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/case.go:373 +0xaeb > github.com/kudobuilder/kuttl/pkg/test.(*Harness).RunTests.func1.1(0xc0001331e0) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/harness.go:401 +0x12e > testing.tRunner(0xc0001331e0, 0xc000122b70) > /nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/testing/testing.go:1689 +0xfb > created by testing.(*T).Run in goroutine 26 > /nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/testing/testing.go:1742 +0x390 logger.go:42: 17:46:05 | recreate/2-write-data | test step completed 2-write-data logger.go:42: 17:46:05 | recreate/3-pause | starting test step 3-pause logger.go:42: 17:46:05 | recreate/3-pause | running command: [sh -c set -o errexit set -o xtrace source ../../functions get_cr \ | yq eval '.spec.pause=true' - \ | yq eval '.spec.mysql.clusterType="async"' - \ | yq eval '.spec.mysql.size=3' - \ | yq eval '.spec.proxy.haproxy.enabled=true' - \ | yq eval '.spec.proxy.haproxy.size=3' - \ | yq eval '.spec.orchestrator.enabled=true' - \ | yq eval '.spec.orchestrator.size=3' - \ | kubectl -n "${NAMESPACE}" apply -f -] logger.go:42: 17:46:05 | recreate/3-pause | + source ../../functions logger.go:42: 17:46:05 | recreate/3-pause | +++ realpath ../../.. 
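Step 3-pause re-renders the CR with .spec.pause=true and re-applies it, which tells the operator to suspend the cluster's workloads. When the goal is only to flip that flag on an already-running cluster, a targeted patch is an equivalent shortcut; a sketch using the same ps short name the log uses for perconaservermysqls:

    # pause the cluster by toggling spec.pause on the live object
    kubectl -n kuttl-test-ethical-krill patch ps recreate --type merge -p '{"spec":{"pause":true}}'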
logger.go:42: 17:46:05 | recreate/3-pause | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:46:05 | recreate/3-pause | ++++ pwd logger.go:42: 17:46:05 | recreate/3-pause | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/tests/recreate logger.go:42: 17:46:05 | recreate/3-pause | ++ test_name=recreate logger.go:42: 17:46:05 | recreate/3-pause | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/vars.sh logger.go:42: 17:46:05 | recreate/3-pause | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:46:05 | recreate/3-pause | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:46:05 | recreate/3-pause | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:46:05 | recreate/3-pause | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:46:05 | recreate/3-pause | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:46:05 | recreate/3-pause | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:46:05 | recreate/3-pause | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:46:05 | recreate/3-pause | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:46:05 | recreate/3-pause | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:46:05 | recreate/3-pause | +++ TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:46:05 | recreate/3-pause | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:46:05 | recreate/3-pause | +++ export GIT_BRANCH=PR-825 logger.go:42: 17:46:05 | recreate/3-pause | +++ GIT_BRANCH=PR-825 logger.go:42: 17:46:05 | recreate/3-pause | +++ export VERSION=PR-825-808887c6 logger.go:42: 17:46:05 | recreate/3-pause | +++ VERSION=PR-825-808887c6 logger.go:42: 17:46:05 | recreate/3-pause | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:46:05 | recreate/3-pause | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:46:05 | recreate/3-pause | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:46:05 | recreate/3-pause | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:46:05 | recreate/3-pause | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:46:05 | recreate/3-pause | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:46:05 | recreate/3-pause | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:46:05 | recreate/3-pause | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:46:05 | recreate/3-pause | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:46:05 | recreate/3-pause | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:46:05 | recreate/3-pause | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:46:05 | recreate/3-pause | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:46:05 | recreate/3-pause | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:46:05 | 
recreate/3-pause | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:46:05 | recreate/3-pause | +++ export PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:46:05 | recreate/3-pause | +++ PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:46:05 | recreate/3-pause | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:46:05 | recreate/3-pause | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:46:05 | recreate/3-pause | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:46:05 | recreate/3-pause | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:46:05 | recreate/3-pause | +++ export CERT_MANAGER_VER=1.16.3 logger.go:42: 17:46:05 | recreate/3-pause | +++ CERT_MANAGER_VER=1.16.3 logger.go:42: 17:46:05 | recreate/3-pause | ++++ which gdate logger.go:42: 17:46:05 | recreate/3-pause | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-825/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 17:46:05 | recreate/3-pause | ++++ which date logger.go:42: 17:46:05 | recreate/3-pause | +++ date=/usr/bin/date logger.go:42: 17:46:05 | recreate/3-pause | +++ oc get projects logger.go:42: 17:46:05 | recreate/3-pause | +++ : logger.go:42: 17:46:05 | recreate/3-pause | +++ kubectl get nodes logger.go:42: 17:46:05 | recreate/3-pause | +++ grep '^minikube' logger.go:42: 17:46:05 | recreate/3-pause | + yq eval .spec.pause=true - logger.go:42: 17:46:05 | recreate/3-pause | + yq eval '.spec.mysql.clusterType="async"' - logger.go:42: 17:46:05 | recreate/3-pause | + yq eval .spec.proxy.haproxy.enabled=true - logger.go:42: 17:46:05 | recreate/3-pause | + get_cr logger.go:42: 17:46:05 | recreate/3-pause | + local name_suffix= logger.go:42: 17:46:05 | recreate/3-pause | + yq eval .spec.proxy.haproxy.size=3 - logger.go:42: 17:46:05 | recreate/3-pause | + yq eval .spec.mysql.size=3 - logger.go:42: 17:46:05 | recreate/3-pause | + yq eval '.spec.mysql.clusterType="async"' - logger.go:42: 17:46:05 | recreate/3-pause | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:46:05 | recreate/3-pause | + kubectl -n kuttl-test-ethical-krill apply -f - logger.go:42: 17:46:05 | recreate/3-pause | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql"' - logger.go:42: 17:46:05 | recreate/3-pause | + yq eval .spec.orchestrator.enabled=true - logger.go:42: 17:46:05 | recreate/3-pause | + yq eval .spec.orchestrator.size=3 - logger.go:42: 17:46:05 | recreate/3-pause | + yq eval '.spec.secretsName="test-secrets"' - logger.go:42: 17:46:05 | recreate/3-pause | + yq eval '.spec.sslSecretName="test-ssl"' - logger.go:42: 17:46:05 | recreate/3-pause | + yq eval '.spec.upgradeOptions.apply="disabled"' - logger.go:42: 17:46:05 | recreate/3-pause | + '[' -n '' ']' logger.go:42: 17:46:05 | recreate/3-pause | + yq eval - logger.go:42: 17:46:05 | recreate/3-pause | ++ printf '.metadata.name="%s"' recreate logger.go:42: 17:46:05 | recreate/3-pause | + yq eval '.metadata.name="recreate"' /mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy/cr.yaml logger.go:42: 17:46:05 | recreate/3-pause | ++ printf '.spec.initImage="%s"' perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:46:05 | recreate/3-pause | + yq eval '.spec.initImage="perconalab/percona-server-mysql-operator:PR-825-808887c6"' - logger.go:42: 17:46:05 | recreate/3-pause | ++ printf '.spec.proxy.haproxy.image="%s"' 
perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:46:05 | recreate/3-pause | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' - logger.go:42: 17:46:05 | recreate/3-pause | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:46:05 | recreate/3-pause | + yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' - logger.go:42: 17:46:05 | recreate/3-pause | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:46:05 | recreate/3-pause | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router"' - logger.go:42: 17:46:05 | recreate/3-pause | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:46:05 | recreate/3-pause | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup"' - logger.go:42: 17:46:05 | recreate/3-pause | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:3-dev-latest logger.go:42: 17:46:05 | recreate/3-pause | + yq eval '.spec.pmm.image="perconalab/pmm-client:3-dev-latest"' - logger.go:42: 17:46:05 | recreate/3-pause | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:46:05 | recreate/3-pause | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' - logger.go:42: 17:46:12 | recreate/3-pause | perconaservermysql.ps.percona.com/recreate configured logger.go:42: 17:47:47 | recreate/3-pause | test step completed 3-pause logger.go:42: 17:47:47 | recreate/4-unpause | starting test step 4-unpause logger.go:42: 17:47:47 | recreate/4-unpause | running command: [sh -c set -o errexit set -o xtrace source ../../functions get_cr \ | yq eval '.spec.pause=false' - \ | yq eval '.spec.mysql.clusterType="async"' - \ | yq eval '.spec.mysql.size=3' - \ | yq eval '.spec.proxy.haproxy.enabled=true' - \ | yq eval '.spec.proxy.haproxy.size=3' - \ | yq eval '.spec.orchestrator.enabled=true' - \ | yq eval '.spec.orchestrator.size=3' - \ | kubectl -n "${NAMESPACE}" apply -f -] logger.go:42: 17:47:47 | recreate/4-unpause | + source ../../functions logger.go:42: 17:47:47 | recreate/4-unpause | +++ realpath ../../.. 
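Step 4-unpause sets .spec.pause back to false and re-applies the CR; the operator then brings the cluster back up, and the step waits about three minutes for it to become ready again. A small polling sketch for that wait, assuming the PerconaServerMySQL CR exposes an aggregate .status.state field that reaches "ready" (an assumption, not shown in this log):

    # resume the cluster and poll until it reports ready again
    kubectl -n kuttl-test-ethical-krill patch ps recreate --type merge -p '{"spec":{"pause":false}}'
    until [ "$(kubectl -n kuttl-test-ethical-krill get ps recreate -o jsonpath='{.status.state}')" = "ready" ]; do
      sleep 5
    done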
logger.go:42: 17:47:47 | recreate/4-unpause | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:47:47 | recreate/4-unpause | ++++ pwd logger.go:42: 17:47:47 | recreate/4-unpause | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/tests/recreate logger.go:42: 17:47:47 | recreate/4-unpause | ++ test_name=recreate logger.go:42: 17:47:47 | recreate/4-unpause | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/vars.sh logger.go:42: 17:47:47 | recreate/4-unpause | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:47:47 | recreate/4-unpause | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:47:47 | recreate/4-unpause | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:47:47 | recreate/4-unpause | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:47:47 | recreate/4-unpause | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:47:47 | recreate/4-unpause | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:47:47 | recreate/4-unpause | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:47:47 | recreate/4-unpause | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:47:47 | recreate/4-unpause | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:47:47 | recreate/4-unpause | +++ TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:47:47 | recreate/4-unpause | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:47:47 | recreate/4-unpause | +++ export GIT_BRANCH=PR-825 logger.go:42: 17:47:47 | recreate/4-unpause | +++ GIT_BRANCH=PR-825 logger.go:42: 17:47:47 | recreate/4-unpause | +++ export VERSION=PR-825-808887c6 logger.go:42: 17:47:47 | recreate/4-unpause | +++ VERSION=PR-825-808887c6 logger.go:42: 17:47:47 | recreate/4-unpause | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:47:47 | recreate/4-unpause | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:47:47 | recreate/4-unpause | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:47:47 | recreate/4-unpause | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:47:47 | recreate/4-unpause | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:47:47 | recreate/4-unpause | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:47:47 | recreate/4-unpause | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:47:47 | recreate/4-unpause | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:47:47 | recreate/4-unpause | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:47:47 | recreate/4-unpause | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:47:47 | recreate/4-unpause | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:47:47 | recreate/4-unpause | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:47:47 | recreate/4-unpause | +++ export 
IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:47:47 | recreate/4-unpause | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:47:47 | recreate/4-unpause | +++ export PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:47:47 | recreate/4-unpause | +++ PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:47:47 | recreate/4-unpause | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:47:47 | recreate/4-unpause | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:47:47 | recreate/4-unpause | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:47:47 | recreate/4-unpause | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:47:47 | recreate/4-unpause | +++ export CERT_MANAGER_VER=1.16.3 logger.go:42: 17:47:47 | recreate/4-unpause | +++ CERT_MANAGER_VER=1.16.3 logger.go:42: 17:47:47 | recreate/4-unpause | ++++ which gdate logger.go:42: 17:47:47 | recreate/4-unpause | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-825/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 17:47:47 | recreate/4-unpause | ++++ which date logger.go:42: 17:47:47 | recreate/4-unpause | +++ date=/usr/bin/date logger.go:42: 17:47:47 | recreate/4-unpause | +++ oc get projects logger.go:42: 17:47:47 | recreate/4-unpause | +++ : logger.go:42: 17:47:47 | recreate/4-unpause | +++ kubectl get nodes logger.go:42: 17:47:47 | recreate/4-unpause | +++ grep '^minikube' logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval .spec.pause=false - logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval '.spec.mysql.clusterType="async"' - logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval .spec.proxy.haproxy.enabled=true - logger.go:42: 17:47:47 | recreate/4-unpause | + get_cr logger.go:42: 17:47:47 | recreate/4-unpause | + local name_suffix= logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval '.spec.sslSecretName="test-ssl"' - logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval .spec.orchestrator.size=3 - logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval '.spec.upgradeOptions.apply="disabled"' - logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval .spec.proxy.haproxy.size=3 - logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval .spec.mysql.size=3 - logger.go:42: 17:47:47 | recreate/4-unpause | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:47:47 | recreate/4-unpause | + kubectl -n kuttl-test-ethical-krill apply -f - logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval .spec.orchestrator.enabled=true - logger.go:42: 17:47:47 | recreate/4-unpause | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:47:47 | recreate/4-unpause | ++ printf '.spec.proxy.haproxy.image="%s"' perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router"' - logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' - logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval '.spec.secretsName="test-secrets"' - logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval '.spec.mysql.clusterType="async"' - logger.go:42: 17:47:47 | recreate/4-unpause | + '[' -n '' ']' logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval 
'.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' - logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval - logger.go:42: 17:47:47 | recreate/4-unpause | ++ printf '.metadata.name="%s"' recreate logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval '.metadata.name="recreate"' /mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy/cr.yaml logger.go:42: 17:47:47 | recreate/4-unpause | ++ printf '.spec.initImage="%s"' perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval '.spec.initImage="perconalab/percona-server-mysql-operator:PR-825-808887c6"' - logger.go:42: 17:47:47 | recreate/4-unpause | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:3-dev-latest logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval '.spec.pmm.image="perconalab/pmm-client:3-dev-latest"' - logger.go:42: 17:47:47 | recreate/4-unpause | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:47:47 | recreate/4-unpause | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql"' - logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' - logger.go:42: 17:47:47 | recreate/4-unpause | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:47:47 | recreate/4-unpause | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup"' - logger.go:42: 17:47:49 | recreate/4-unpause | perconaservermysql.ps.percona.com/recreate configured logger.go:42: 17:50:50 | recreate/4-unpause | test step completed 4-unpause logger.go:42: 17:50:50 | recreate/5-write-data | starting test step 5-write-data logger.go:42: 17:50:50 | recreate/5-write-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "INSERT myDB.myTable (id) VALUES (100501)" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" for i in 0 1 2; do host=$(get_mysql_headless_fqdn $(get_cluster_name) $i) data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${host} -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-write-data-${i} --from-literal=data="${data}" done] logger.go:42: 17:50:50 | recreate/5-write-data | + source ../../functions logger.go:42: 17:50:50 | recreate/5-write-data | +++ realpath ../../.. 
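Per the trace that follows, the run_mysql helper used by 5-write-data is a kubectl exec against the long-running mysql-client pod: the INSERT goes through the recreate-haproxy service, then each replica is queried directly and the result is stored in a configmap. A condensed sketch for replica 0, using only the namespace, host, and credentials recorded in this run:

data=$(kubectl -n kuttl-test-ethical-krill exec mysql-client -- bash -c \
    'printf "%s\n" "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-0.recreate-mysql -uroot -proot_password' \
  | sed -e 's/mysql: //' \
  | grep -v 'Using a password on the command line interface can be insecure.')
kubectl -n kuttl-test-ethical-krill create configmap 06-write-data-0 --from-literal=data="${data}"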
logger.go:42: 17:50:50 | recreate/5-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:50:50 | recreate/5-write-data | ++++ pwd logger.go:42: 17:50:50 | recreate/5-write-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/tests/recreate logger.go:42: 17:50:50 | recreate/5-write-data | ++ test_name=recreate logger.go:42: 17:50:50 | recreate/5-write-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/vars.sh logger.go:42: 17:50:50 | recreate/5-write-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:50:50 | recreate/5-write-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:50:50 | recreate/5-write-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:50:50 | recreate/5-write-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:50:50 | recreate/5-write-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:50:50 | recreate/5-write-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:50:50 | recreate/5-write-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:50:50 | recreate/5-write-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:50:50 | recreate/5-write-data | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:50:50 | recreate/5-write-data | +++ TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:50:50 | recreate/5-write-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:50:50 | recreate/5-write-data | +++ export GIT_BRANCH=PR-825 logger.go:42: 17:50:50 | recreate/5-write-data | +++ GIT_BRANCH=PR-825 logger.go:42: 17:50:50 | recreate/5-write-data | +++ export VERSION=PR-825-808887c6 logger.go:42: 17:50:50 | recreate/5-write-data | +++ VERSION=PR-825-808887c6 logger.go:42: 17:50:50 | recreate/5-write-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:50:50 | recreate/5-write-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:50:50 | recreate/5-write-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:50:50 | recreate/5-write-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:50:50 | recreate/5-write-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:50:50 | recreate/5-write-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:50:50 | recreate/5-write-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:50:50 | recreate/5-write-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:50:50 | recreate/5-write-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:50:50 | recreate/5-write-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:50:50 | recreate/5-write-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:50:50 | recreate/5-write-data | +++ 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:50:50 | recreate/5-write-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:50:50 | recreate/5-write-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:50:50 | recreate/5-write-data | +++ export PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:50:50 | recreate/5-write-data | +++ PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:50:50 | recreate/5-write-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:50:50 | recreate/5-write-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:50:50 | recreate/5-write-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:50:50 | recreate/5-write-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:50:50 | recreate/5-write-data | +++ export CERT_MANAGER_VER=1.16.3 logger.go:42: 17:50:50 | recreate/5-write-data | +++ CERT_MANAGER_VER=1.16.3 logger.go:42: 17:50:50 | recreate/5-write-data | ++++ which gdate logger.go:42: 17:50:50 | recreate/5-write-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-825/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 17:50:50 | recreate/5-write-data | ++++ which date logger.go:42: 17:50:50 | recreate/5-write-data | +++ date=/usr/bin/date logger.go:42: 17:50:50 | recreate/5-write-data | +++ oc get projects logger.go:42: 17:50:50 | recreate/5-write-data | +++ : logger.go:42: 17:50:50 | recreate/5-write-data | +++ kubectl get nodes logger.go:42: 17:50:50 | recreate/5-write-data | +++ grep '^minikube' logger.go:42: 17:50:50 | recreate/5-write-data | +++ get_cluster_name logger.go:42: 17:50:50 | recreate/5-write-data | +++ kubectl -n kuttl-test-ethical-krill get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 17:50:51 | recreate/5-write-data | ++ get_haproxy_svc recreate logger.go:42: 17:50:51 | recreate/5-write-data | ++ local cluster=recreate logger.go:42: 17:50:51 | recreate/5-write-data | ++ echo recreate-haproxy logger.go:42: 17:50:51 | recreate/5-write-data | + run_mysql 'INSERT myDB.myTable (id) VALUES (100501)' '-h recreate-haproxy -uroot -proot_password' logger.go:42: 17:50:51 | recreate/5-write-data | + local 'command=INSERT myDB.myTable (id) VALUES (100501)' logger.go:42: 17:50:51 | recreate/5-write-data | + local 'uri=-h recreate-haproxy -uroot -proot_password' logger.go:42: 17:50:51 | recreate/5-write-data | + local pod= logger.go:42: 17:50:51 | recreate/5-write-data | ++ get_client_pod logger.go:42: 17:50:51 | recreate/5-write-data | ++ kubectl -n kuttl-test-ethical-krill get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:50:51 | recreate/5-write-data | + client_pod=mysql-client logger.go:42: 17:50:51 | recreate/5-write-data | + wait_pod mysql-client logger.go:42: 17:50:51 | recreate/5-write-data | + local pod=mysql-client logger.go:42: 17:50:51 | recreate/5-write-data | + set +o xtrace logger.go:42: 17:50:51 | recreate/5-write-data | mysql-clienttrue logger.go:42: 17:50:51 | recreate/5-write-data | + kubectl -n kuttl-test-ethical-krill exec mysql-client -- bash -c 'printf '\''%s\n'\'' "INSERT myDB.myTable (id) VALUES (100501)" | mysql -sN -h recreate-haproxy -uroot -proot_password' logger.go:42: 17:50:51 | recreate/5-write-data | + sed -e 's/mysql: //' logger.go:42: 17:50:51 | recreate/5-write-data | + grep -v 'Using a password on the 
command line interface can be insecure.' logger.go:42: 17:50:53 | recreate/5-write-data | + : logger.go:42: 17:50:53 | recreate/5-write-data | + for i in 0 1 2 logger.go:42: 17:50:53 | recreate/5-write-data | +++ get_cluster_name logger.go:42: 17:50:53 | recreate/5-write-data | +++ kubectl -n kuttl-test-ethical-krill get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 17:50:54 | recreate/5-write-data | ++ get_mysql_headless_fqdn recreate 0 logger.go:42: 17:50:54 | recreate/5-write-data | ++ local cluster=recreate logger.go:42: 17:50:54 | recreate/5-write-data | ++ local index=0 logger.go:42: 17:50:54 | recreate/5-write-data | ++ echo recreate-mysql-0.recreate-mysql logger.go:42: 17:50:54 | recreate/5-write-data | + host=recreate-mysql-0.recreate-mysql logger.go:42: 17:50:54 | recreate/5-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-0.recreate-mysql -uroot -proot_password' logger.go:42: 17:50:54 | recreate/5-write-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 17:50:54 | recreate/5-write-data | ++ local 'uri=-h recreate-mysql-0.recreate-mysql -uroot -proot_password' logger.go:42: 17:50:54 | recreate/5-write-data | ++ local pod= logger.go:42: 17:50:54 | recreate/5-write-data | +++ get_client_pod logger.go:42: 17:50:54 | recreate/5-write-data | +++ kubectl -n kuttl-test-ethical-krill get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:50:54 | recreate/5-write-data | ++ client_pod=mysql-client logger.go:42: 17:50:54 | recreate/5-write-data | ++ wait_pod mysql-client logger.go:42: 17:50:54 | recreate/5-write-data | ++ local pod=mysql-client logger.go:42: 17:50:54 | recreate/5-write-data | ++ set +o xtrace logger.go:42: 17:50:54 | recreate/5-write-data | mysql-clienttrue logger.go:42: 17:50:54 | recreate/5-write-data | ++ kubectl -n kuttl-test-ethical-krill exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-0.recreate-mysql -uroot -proot_password' logger.go:42: 17:50:54 | recreate/5-write-data | ++ sed -e 's/mysql: //' logger.go:42: 17:50:54 | recreate/5-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 17:50:56 | recreate/5-write-data | + data='100500 logger.go:42: 17:50:56 | recreate/5-write-data | 100501' logger.go:42: 17:50:56 | recreate/5-write-data | + kubectl create configmap -n kuttl-test-ethical-krill 06-write-data-0 '--from-literal=data=100500 logger.go:42: 17:50:56 | recreate/5-write-data | 100501' logger.go:42: 17:50:56 | recreate/5-write-data | configmap/06-write-data-0 created logger.go:42: 17:50:56 | recreate/5-write-data | + for i in 0 1 2 logger.go:42: 17:50:56 | recreate/5-write-data | +++ get_cluster_name logger.go:42: 17:50:56 | recreate/5-write-data | +++ kubectl -n kuttl-test-ethical-krill get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 17:50:57 | recreate/5-write-data | ++ get_mysql_headless_fqdn recreate 1 logger.go:42: 17:50:57 | recreate/5-write-data | ++ local cluster=recreate logger.go:42: 17:50:57 | recreate/5-write-data | ++ local index=1 logger.go:42: 17:50:57 | recreate/5-write-data | ++ echo recreate-mysql-1.recreate-mysql logger.go:42: 17:50:57 | recreate/5-write-data | + host=recreate-mysql-1.recreate-mysql logger.go:42: 17:50:57 | recreate/5-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-1.recreate-mysql -uroot -proot_password' logger.go:42: 17:50:57 | recreate/5-write-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 17:50:57 | recreate/5-write-data | ++ local 'uri=-h recreate-mysql-1.recreate-mysql -uroot -proot_password' logger.go:42: 17:50:57 | recreate/5-write-data | ++ local pod= logger.go:42: 17:50:57 | recreate/5-write-data | +++ get_client_pod logger.go:42: 17:50:57 | recreate/5-write-data | +++ kubectl -n kuttl-test-ethical-krill get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:50:57 | recreate/5-write-data | ++ client_pod=mysql-client logger.go:42: 17:50:57 | recreate/5-write-data | ++ wait_pod mysql-client logger.go:42: 17:50:57 | recreate/5-write-data | ++ local pod=mysql-client logger.go:42: 17:50:57 | recreate/5-write-data | ++ set +o xtrace logger.go:42: 17:50:57 | recreate/5-write-data | mysql-clienttrue logger.go:42: 17:50:57 | recreate/5-write-data | ++ kubectl -n kuttl-test-ethical-krill exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-1.recreate-mysql -uroot -proot_password' logger.go:42: 17:50:57 | recreate/5-write-data | ++ sed -e 's/mysql: //' logger.go:42: 17:50:57 | recreate/5-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 17:50:59 | recreate/5-write-data | + data='100500 logger.go:42: 17:50:59 | recreate/5-write-data | 100501' logger.go:42: 17:50:59 | recreate/5-write-data | + kubectl create configmap -n kuttl-test-ethical-krill 06-write-data-1 '--from-literal=data=100500 logger.go:42: 17:50:59 | recreate/5-write-data | 100501' logger.go:42: 17:50:59 | recreate/5-write-data | configmap/06-write-data-1 created logger.go:42: 17:50:59 | recreate/5-write-data | + for i in 0 1 2 logger.go:42: 17:50:59 | recreate/5-write-data | +++ get_cluster_name logger.go:42: 17:50:59 | recreate/5-write-data | +++ kubectl -n kuttl-test-ethical-krill get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 17:51:00 | recreate/5-write-data | ++ get_mysql_headless_fqdn recreate 2 logger.go:42: 17:51:00 | recreate/5-write-data | ++ local cluster=recreate logger.go:42: 17:51:00 | recreate/5-write-data | ++ local index=2 logger.go:42: 17:51:00 | recreate/5-write-data | ++ echo recreate-mysql-2.recreate-mysql logger.go:42: 17:51:00 | recreate/5-write-data | + host=recreate-mysql-2.recreate-mysql logger.go:42: 17:51:00 | recreate/5-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-2.recreate-mysql -uroot -proot_password' logger.go:42: 17:51:00 | recreate/5-write-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 17:51:00 | recreate/5-write-data | ++ local 'uri=-h recreate-mysql-2.recreate-mysql -uroot -proot_password' logger.go:42: 17:51:00 | recreate/5-write-data | ++ local pod= logger.go:42: 17:51:00 | recreate/5-write-data | +++ get_client_pod logger.go:42: 17:51:00 | recreate/5-write-data | +++ kubectl -n kuttl-test-ethical-krill get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:51:00 | recreate/5-write-data | ++ client_pod=mysql-client logger.go:42: 17:51:00 | recreate/5-write-data | ++ wait_pod mysql-client logger.go:42: 17:51:00 | recreate/5-write-data | ++ local pod=mysql-client logger.go:42: 17:51:00 | recreate/5-write-data | ++ set +o xtrace logger.go:42: 17:51:01 | recreate/5-write-data | mysql-clienttrue logger.go:42: 17:51:01 | recreate/5-write-data | ++ kubectl -n kuttl-test-ethical-krill exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-2.recreate-mysql -uroot -proot_password' logger.go:42: 17:51:01 | recreate/5-write-data | ++ sed -e 's/mysql: //' logger.go:42: 17:51:01 | recreate/5-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 17:51:02 | recreate/5-write-data | + data='100500 logger.go:42: 17:51:02 | recreate/5-write-data | 100501' logger.go:42: 17:51:02 | recreate/5-write-data | + kubectl create configmap -n kuttl-test-ethical-krill 06-write-data-2 '--from-literal=data=100500 logger.go:42: 17:51:02 | recreate/5-write-data | 100501' logger.go:42: 17:51:02 | recreate/5-write-data | configmap/06-write-data-2 created logger.go:42: 17:51:03 | recreate/5-write-data | test step completed 5-write-data logger.go:42: 17:51:03 | recreate/7-delete-cluster | starting test step 7-delete-cluster logger.go:42: 17:51:03 | recreate/7-delete-cluster | running command: [sh -c set -o errexit set -o xtrace source ../../functions kubectl delete ps -n ${NAMESPACE} recreate] logger.go:42: 17:51:03 | recreate/7-delete-cluster | + source ../../functions logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ realpath ../../.. 
logger.go:42: 17:51:03 | recreate/7-delete-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:51:03 | recreate/7-delete-cluster | ++++ pwd logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/tests/recreate logger.go:42: 17:51:03 | recreate/7-delete-cluster | ++ test_name=recreate logger.go:42: 17:51:03 | recreate/7-delete-cluster | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/vars.sh logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:51:03 | recreate/7-delete-cluster | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export GIT_BRANCH=PR-825 logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ GIT_BRANCH=PR-825 logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export VERSION=PR-825-808887c6 logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ VERSION=PR-825-808887c6 logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ export CERT_MANAGER_VER=1.16.3 logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ CERT_MANAGER_VER=1.16.3 logger.go:42: 17:51:03 | recreate/7-delete-cluster | ++++ which gdate logger.go:42: 17:51:03 | recreate/7-delete-cluster | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-825/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 17:51:03 | recreate/7-delete-cluster | ++++ which date logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ date=/usr/bin/date logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ oc get projects logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ : logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ kubectl get nodes logger.go:42: 17:51:03 | recreate/7-delete-cluster | +++ grep '^minikube' logger.go:42: 17:51:04 | recreate/7-delete-cluster | + kubectl delete ps -n kuttl-test-ethical-krill recreate logger.go:42: 17:51:04 | recreate/7-delete-cluster | perconaservermysql.ps.percona.com "recreate" deleted logger.go:42: 17:51:23 | recreate/7-delete-cluster | test step completed 7-delete-cluster logger.go:42: 17:51:23 | recreate/8- | starting test step 8- logger.go:42: 17:51:24 | recreate/8- | test step completed 8- logger.go:42: 17:51:24 | recreate/9-recreate-cluster | starting test step 9-recreate-cluster logger.go:42: 17:51:24 | recreate/9-recreate-cluster | running command: [sh -c set -o errexit set -o xtrace source ../../functions get_cr \ | yq eval '.spec.pause=false' - \ | yq eval '.spec.mysql.clusterType="async"' - \ | yq eval '.spec.mysql.size=3' - \ | yq eval '.spec.proxy.haproxy.enabled=true' - \ | yq eval '.spec.proxy.haproxy.size=3' - \ | yq eval '.spec.orchestrator.enabled=true' - \ | yq eval '.spec.orchestrator.size=3' - \ | kubectl -n "${NAMESPACE}" apply -f -] logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + source ../../functions logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ realpath ../../.. 
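Step 9-recreate-cluster re-runs the same get_cr | yq | kubectl apply pipeline sketched after 4-unpause. The practical difference is that the custom resource was removed in 7-delete-cluster, so this apply recreates it from scratch:

  ... | kubectl -n kuttl-test-ethical-krill apply -f -
  # reported below as "perconaservermysql.ps.percona.com/recreate created"
  # (steps 3-pause and 4-unpause reported "configured", since the object still existed)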
logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++++ pwd logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/tests/recreate logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++ test_name=recreate logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/vars.sh logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export GIT_BRANCH=PR-825 logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ GIT_BRANCH=PR-825 logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export VERSION=PR-825-808887c6 logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ VERSION=PR-825-808887c6 logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ 
export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ export CERT_MANAGER_VER=1.16.3 logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ CERT_MANAGER_VER=1.16.3 logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++++ which gdate logger.go:42: 17:51:24 | recreate/9-recreate-cluster | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-825/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++++ which date logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ date=/usr/bin/date logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ oc get projects logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ : logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ kubectl get nodes logger.go:42: 17:51:24 | recreate/9-recreate-cluster | +++ grep '^minikube' logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + get_cr logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + local name_suffix= logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval .spec.pause=false - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval '.spec.mysql.clusterType="async"' - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + kubectl -n kuttl-test-ethical-krill apply -f - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval .spec.orchestrator.size=3 - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval .spec.proxy.haproxy.size=3 - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval .spec.proxy.haproxy.enabled=true - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval .spec.orchestrator.enabled=true - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval .spec.mysql.size=3 - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval '.spec.secretsName="test-secrets"' - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + '[' -n '' ']' logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval '.spec.upgradeOptions.apply="disabled"' - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval '.spec.mysql.clusterType="async"' - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++ printf '.metadata.name="%s"' recreate logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval '.metadata.name="recreate"' 
/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy/cr.yaml logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++ printf '.spec.initImage="%s"' perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:3-dev-latest logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup"' - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router"' - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval '.spec.initImage="perconalab/percona-server-mysql-operator:PR-825-808887c6"' - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++ printf '.spec.proxy.haproxy.image="%s"' perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval '.spec.sslSecretName="test-ssl"' - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql"' - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval '.spec.pmm.image="perconalab/pmm-client:3-dev-latest"' - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' - logger.go:42: 17:51:24 | recreate/9-recreate-cluster | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:51:24 | recreate/9-recreate-cluster | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' - logger.go:42: 17:51:25 | recreate/9-recreate-cluster | perconaservermysql.ps.percona.com/recreate created logger.go:42: 17:54:53 | recreate/9-recreate-cluster | test step completed 9-recreate-cluster logger.go:42: 17:54:53 | recreate/10-write-data | starting test step 10-write-data logger.go:42: 17:54:53 | recreate/10-write-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "INSERT myDB.myTable (id) VALUES (100502)" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" for i in 0 1 2; do host=$(get_mysql_headless_fqdn $(get_cluster_name) $i) data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${host} -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 11-write-data-${i} --from-literal=data="${data}" done] logger.go:42: 17:54:53 | recreate/10-write-data | + source ../../functions logger.go:42: 17:54:53 | recreate/10-write-data | +++ realpath ../../.. 
logger.go:42: 17:54:53 | recreate/10-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:54:53 | recreate/10-write-data | ++++ pwd logger.go:42: 17:54:53 | recreate/10-write-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/tests/recreate logger.go:42: 17:54:53 | recreate/10-write-data | ++ test_name=recreate logger.go:42: 17:54:53 | recreate/10-write-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/vars.sh logger.go:42: 17:54:53 | recreate/10-write-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:54:53 | recreate/10-write-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:54:53 | recreate/10-write-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:54:53 | recreate/10-write-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:54:53 | recreate/10-write-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:54:53 | recreate/10-write-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:54:53 | recreate/10-write-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:54:53 | recreate/10-write-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:54:53 | recreate/10-write-data | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:54:53 | recreate/10-write-data | +++ TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:54:53 | recreate/10-write-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:54:53 | recreate/10-write-data | +++ export GIT_BRANCH=PR-825 logger.go:42: 17:54:53 | recreate/10-write-data | +++ GIT_BRANCH=PR-825 logger.go:42: 17:54:53 | recreate/10-write-data | +++ export VERSION=PR-825-808887c6 logger.go:42: 17:54:53 | recreate/10-write-data | +++ VERSION=PR-825-808887c6 logger.go:42: 17:54:53 | recreate/10-write-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:54:53 | recreate/10-write-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:54:53 | recreate/10-write-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:54:53 | recreate/10-write-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:54:53 | recreate/10-write-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:54:53 | recreate/10-write-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:54:53 | recreate/10-write-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:54:53 | recreate/10-write-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:54:53 | recreate/10-write-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:54:53 | recreate/10-write-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:54:53 | recreate/10-write-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:54:53 | recreate/10-write-data | +++ 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:54:53 | recreate/10-write-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:54:53 | recreate/10-write-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:54:53 | recreate/10-write-data | +++ export PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:54:53 | recreate/10-write-data | +++ PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:54:53 | recreate/10-write-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:54:53 | recreate/10-write-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:54:53 | recreate/10-write-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:54:53 | recreate/10-write-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:54:53 | recreate/10-write-data | +++ export CERT_MANAGER_VER=1.16.3 logger.go:42: 17:54:53 | recreate/10-write-data | +++ CERT_MANAGER_VER=1.16.3 logger.go:42: 17:54:53 | recreate/10-write-data | ++++ which gdate logger.go:42: 17:54:53 | recreate/10-write-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-825/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 17:54:53 | recreate/10-write-data | ++++ which date logger.go:42: 17:54:53 | recreate/10-write-data | +++ date=/usr/bin/date logger.go:42: 17:54:53 | recreate/10-write-data | +++ oc get projects logger.go:42: 17:54:53 | recreate/10-write-data | +++ : logger.go:42: 17:54:53 | recreate/10-write-data | +++ kubectl get nodes logger.go:42: 17:54:53 | recreate/10-write-data | +++ grep '^minikube' logger.go:42: 17:54:54 | recreate/10-write-data | +++ get_cluster_name logger.go:42: 17:54:54 | recreate/10-write-data | +++ kubectl -n kuttl-test-ethical-krill get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 17:54:54 | recreate/10-write-data | ++ get_haproxy_svc recreate logger.go:42: 17:54:54 | recreate/10-write-data | ++ local cluster=recreate logger.go:42: 17:54:54 | recreate/10-write-data | ++ echo recreate-haproxy logger.go:42: 17:54:54 | recreate/10-write-data | + run_mysql 'INSERT myDB.myTable (id) VALUES (100502)' '-h recreate-haproxy -uroot -proot_password' logger.go:42: 17:54:54 | recreate/10-write-data | + local 'command=INSERT myDB.myTable (id) VALUES (100502)' logger.go:42: 17:54:54 | recreate/10-write-data | + local 'uri=-h recreate-haproxy -uroot -proot_password' logger.go:42: 17:54:54 | recreate/10-write-data | + local pod= logger.go:42: 17:54:54 | recreate/10-write-data | ++ get_client_pod logger.go:42: 17:54:54 | recreate/10-write-data | ++ kubectl -n kuttl-test-ethical-krill get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:54:55 | recreate/10-write-data | + client_pod=mysql-client logger.go:42: 17:54:55 | recreate/10-write-data | + wait_pod mysql-client logger.go:42: 17:54:55 | recreate/10-write-data | + local pod=mysql-client logger.go:42: 17:54:55 | recreate/10-write-data | + set +o xtrace logger.go:42: 17:54:55 | recreate/10-write-data | mysql-clienttrue logger.go:42: 17:54:55 | recreate/10-write-data | + kubectl -n kuttl-test-ethical-krill exec mysql-client -- bash -c 'printf '\''%s\n'\'' "INSERT myDB.myTable (id) VALUES (100502)" | mysql -sN -h recreate-haproxy -uroot -proot_password' logger.go:42: 17:54:55 | recreate/10-write-data | + sed -e 's/mysql: //' logger.go:42: 17:54:55 | recreate/10-write-data | + 
grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 17:54:57 | recreate/10-write-data | + : logger.go:42: 17:54:57 | recreate/10-write-data | + for i in 0 1 2 logger.go:42: 17:54:57 | recreate/10-write-data | +++ get_cluster_name logger.go:42: 17:54:57 | recreate/10-write-data | +++ kubectl -n kuttl-test-ethical-krill get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 17:54:57 | recreate/10-write-data | ++ get_mysql_headless_fqdn recreate 0 logger.go:42: 17:54:57 | recreate/10-write-data | ++ local cluster=recreate logger.go:42: 17:54:57 | recreate/10-write-data | ++ local index=0 logger.go:42: 17:54:57 | recreate/10-write-data | ++ echo recreate-mysql-0.recreate-mysql logger.go:42: 17:54:57 | recreate/10-write-data | + host=recreate-mysql-0.recreate-mysql logger.go:42: 17:54:57 | recreate/10-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-0.recreate-mysql -uroot -proot_password' logger.go:42: 17:54:57 | recreate/10-write-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 17:54:57 | recreate/10-write-data | ++ local 'uri=-h recreate-mysql-0.recreate-mysql -uroot -proot_password' logger.go:42: 17:54:57 | recreate/10-write-data | ++ local pod= logger.go:42: 17:54:57 | recreate/10-write-data | +++ get_client_pod logger.go:42: 17:54:57 | recreate/10-write-data | +++ kubectl -n kuttl-test-ethical-krill get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:54:57 | recreate/10-write-data | ++ client_pod=mysql-client logger.go:42: 17:54:57 | recreate/10-write-data | ++ wait_pod mysql-client logger.go:42: 17:54:57 | recreate/10-write-data | ++ local pod=mysql-client logger.go:42: 17:54:57 | recreate/10-write-data | ++ set +o xtrace logger.go:42: 17:54:58 | recreate/10-write-data | mysql-clienttrue logger.go:42: 17:54:58 | recreate/10-write-data | ++ kubectl -n kuttl-test-ethical-krill exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-0.recreate-mysql -uroot -proot_password' logger.go:42: 17:54:58 | recreate/10-write-data | ++ sed -e 's/mysql: //' logger.go:42: 17:54:58 | recreate/10-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 17:54:59 | recreate/10-write-data | + data='100500 logger.go:42: 17:54:59 | recreate/10-write-data | 100501 logger.go:42: 17:54:59 | recreate/10-write-data | 100502' logger.go:42: 17:54:59 | recreate/10-write-data | + kubectl create configmap -n kuttl-test-ethical-krill 11-write-data-0 '--from-literal=data=100500 logger.go:42: 17:54:59 | recreate/10-write-data | 100501 logger.go:42: 17:54:59 | recreate/10-write-data | 100502' logger.go:42: 17:55:00 | recreate/10-write-data | configmap/11-write-data-0 created logger.go:42: 17:55:00 | recreate/10-write-data | + for i in 0 1 2 logger.go:42: 17:55:00 | recreate/10-write-data | +++ get_cluster_name logger.go:42: 17:55:00 | recreate/10-write-data | +++ kubectl -n kuttl-test-ethical-krill get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 17:55:00 | recreate/10-write-data | ++ get_mysql_headless_fqdn recreate 1 logger.go:42: 17:55:00 | recreate/10-write-data | ++ local cluster=recreate logger.go:42: 17:55:00 | recreate/10-write-data | ++ local index=1 logger.go:42: 17:55:00 | recreate/10-write-data | ++ echo recreate-mysql-1.recreate-mysql logger.go:42: 17:55:00 | recreate/10-write-data | + host=recreate-mysql-1.recreate-mysql logger.go:42: 17:55:00 | recreate/10-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-1.recreate-mysql -uroot -proot_password' logger.go:42: 17:55:00 | recreate/10-write-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 17:55:00 | recreate/10-write-data | ++ local 'uri=-h recreate-mysql-1.recreate-mysql -uroot -proot_password' logger.go:42: 17:55:00 | recreate/10-write-data | ++ local pod= logger.go:42: 17:55:00 | recreate/10-write-data | +++ get_client_pod logger.go:42: 17:55:00 | recreate/10-write-data | +++ kubectl -n kuttl-test-ethical-krill get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:55:00 | recreate/10-write-data | ++ client_pod=mysql-client logger.go:42: 17:55:00 | recreate/10-write-data | ++ wait_pod mysql-client logger.go:42: 17:55:00 | recreate/10-write-data | ++ local pod=mysql-client logger.go:42: 17:55:00 | recreate/10-write-data | ++ set +o xtrace logger.go:42: 17:55:01 | recreate/10-write-data | mysql-clienttrue logger.go:42: 17:55:01 | recreate/10-write-data | ++ kubectl -n kuttl-test-ethical-krill exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-1.recreate-mysql -uroot -proot_password' logger.go:42: 17:55:01 | recreate/10-write-data | ++ sed -e 's/mysql: //' logger.go:42: 17:55:01 | recreate/10-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 17:55:02 | recreate/10-write-data | + data='100500 logger.go:42: 17:55:02 | recreate/10-write-data | 100501 logger.go:42: 17:55:02 | recreate/10-write-data | 100502' logger.go:42: 17:55:02 | recreate/10-write-data | + kubectl create configmap -n kuttl-test-ethical-krill 11-write-data-1 '--from-literal=data=100500 logger.go:42: 17:55:02 | recreate/10-write-data | 100501 logger.go:42: 17:55:02 | recreate/10-write-data | 100502' logger.go:42: 17:55:03 | recreate/10-write-data | configmap/11-write-data-1 created logger.go:42: 17:55:03 | recreate/10-write-data | + for i in 0 1 2 logger.go:42: 17:55:03 | recreate/10-write-data | +++ get_cluster_name logger.go:42: 17:55:03 | recreate/10-write-data | +++ kubectl -n kuttl-test-ethical-krill get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 17:55:03 | recreate/10-write-data | ++ get_mysql_headless_fqdn recreate 2 logger.go:42: 17:55:03 | recreate/10-write-data | ++ local cluster=recreate logger.go:42: 17:55:03 | recreate/10-write-data | ++ local index=2 logger.go:42: 17:55:03 | recreate/10-write-data | ++ echo recreate-mysql-2.recreate-mysql logger.go:42: 17:55:03 | recreate/10-write-data | + host=recreate-mysql-2.recreate-mysql logger.go:42: 17:55:03 | recreate/10-write-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h recreate-mysql-2.recreate-mysql -uroot -proot_password' logger.go:42: 17:55:03 | recreate/10-write-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 17:55:03 | recreate/10-write-data | ++ local 'uri=-h recreate-mysql-2.recreate-mysql -uroot -proot_password' logger.go:42: 17:55:03 | recreate/10-write-data | ++ local pod= logger.go:42: 17:55:03 | recreate/10-write-data | +++ get_client_pod logger.go:42: 17:55:03 | recreate/10-write-data | +++ kubectl -n kuttl-test-ethical-krill get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:55:03 | recreate/10-write-data | ++ client_pod=mysql-client logger.go:42: 17:55:03 | recreate/10-write-data | ++ wait_pod mysql-client logger.go:42: 17:55:03 | recreate/10-write-data | ++ local pod=mysql-client logger.go:42: 17:55:03 | recreate/10-write-data | ++ set +o xtrace logger.go:42: 17:55:04 | recreate/10-write-data | mysql-clienttrue logger.go:42: 17:55:04 | recreate/10-write-data | ++ sed -e 's/mysql: //' logger.go:42: 17:55:04 | recreate/10-write-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 17:55:04 | recreate/10-write-data | ++ kubectl -n kuttl-test-ethical-krill exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h recreate-mysql-2.recreate-mysql -uroot -proot_password' logger.go:42: 17:55:05 | recreate/10-write-data | + data='100500 logger.go:42: 17:55:05 | recreate/10-write-data | 100501 logger.go:42: 17:55:05 | recreate/10-write-data | 100502' logger.go:42: 17:55:05 | recreate/10-write-data | + kubectl create configmap -n kuttl-test-ethical-krill 11-write-data-2 '--from-literal=data=100500 logger.go:42: 17:55:05 | recreate/10-write-data | 100501 logger.go:42: 17:55:05 | recreate/10-write-data | 100502' logger.go:42: 17:55:06 | recreate/10-write-data | configmap/11-write-data-2 created logger.go:42: 17:55:07 | recreate/10-write-data | test step completed 10-write-data logger.go:42: 17:55:07 | recreate/98-drop-finalizer | starting test step 98-drop-finalizer logger.go:42: 17:55:07 | recreate/98-drop-finalizer | PerconaServerMySQL:kuttl-test-ethical-krill/recreate updated logger.go:42: 17:55:07 | recreate/98-drop-finalizer | test step completed 98-drop-finalizer logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | starting test step 99-remove-cluster-gracefully logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | running command: [sh -c set -o errexit set -o xtrace source ../../functions destroy_operator] logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | + source ../../functions logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ realpath ../../.. logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | ++++ pwd logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/tests/recreate logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | ++ test_name=recreate logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/vars.sh logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-825 logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/deploy logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-825/e2e-tests/conf logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ TEMP_DIR=/tmp/kuttl/ps/recreate logger.go:42: 17:55:07 | 
recreate/99-remove-cluster-gracefully | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export GIT_BRANCH=PR-825 logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ GIT_BRANCH=PR-825 logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export VERSION=PR-825-808887c6 logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ VERSION=PR-825-808887c6 logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-825-808887c6 logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ PMM_SERVER_VERSION=1.4.0 logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ export CERT_MANAGER_VER=1.16.3 logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ CERT_MANAGER_VER=1.16.3 logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | ++++ which gdate logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | which: no gdate in 
(/mnt/jenkins/workspace/cloud-ps-operator_PR-825/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | ++++ which date logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ date=/usr/bin/date logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ oc get projects logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ : logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ kubectl get nodes logger.go:42: 17:55:07 | recreate/99-remove-cluster-gracefully | +++ grep '^minikube' logger.go:42: 17:55:08 | recreate/99-remove-cluster-gracefully | + destroy_operator logger.go:42: 17:55:08 | recreate/99-remove-cluster-gracefully | + kubectl -n ps-operator delete deployment percona-server-mysql-operator --force --grace-period=0 logger.go:42: 17:55:08 | recreate/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 17:55:09 | recreate/99-remove-cluster-gracefully | deployment.apps "percona-server-mysql-operator" force deleted logger.go:42: 17:55:09 | recreate/99-remove-cluster-gracefully | + [[ -n ps-operator ]] logger.go:42: 17:55:09 | recreate/99-remove-cluster-gracefully | + kubectl delete namespace ps-operator --force --grace-period=0 logger.go:42: 17:55:09 | recreate/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 17:55:09 | recreate/99-remove-cluster-gracefully | namespace "ps-operator" force deleted logger.go:42: 17:55:15 | recreate/99-remove-cluster-gracefully | test step completed 99-remove-cluster-gracefully logger.go:42: 17:55:15 | recreate | recreate events from ns kuttl-test-ethical-krill: logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:25 +0000 UTC Normal Pod mysql-client Binding Scheduled Successfully assigned kuttl-test-ethical-krill/mysql-client to gke-jen-ps-825-808887c6--default-pool-9007dcbb-qcz1 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:26 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Pulled Container image "percona/percona-server:8.0.33" already present on machine kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:26 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Created Created container: mysql-client kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:26 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Started Started container mysql-client kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:37 +0000 UTC Normal PersistentVolumeClaim datadir-recreate-mysql-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:37 +0000 UTC Normal StatefulSet.apps recreate-mysql SuccessfulCreate create Claim datadir-recreate-mysql-0 Pod recreate-mysql-0 in StatefulSet recreate-mysql success statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:38 +0000 UTC Normal PersistentVolumeClaim datadir-recreate-mysql-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. 
If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:38 +0000 UTC Normal PersistentVolumeClaim datadir-recreate-mysql-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-ethical-krill/datadir-recreate-mysql-0" pd.csi.storage.gke.io_gke-4fda7b6352e34818b132-132d-2d80-vm_35932e2e-0307-41e9-9804-89920a9ce6ae logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:38 +0000 UTC Normal StatefulSet.apps recreate-mysql SuccessfulCreate create Pod recreate-mysql-0 in StatefulSet recreate-mysql successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:38 +0000 UTC Normal Pod recreate-orc-0 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-orc-0 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-0647 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:38 +0000 UTC Normal StatefulSet.apps recreate-orc SuccessfulCreate create Pod recreate-orc-0 in StatefulSet recreate-orc successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:39 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:39 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 259ms (260ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:39 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:39 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:40 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:41 +0000 UTC Normal PersistentVolumeClaim datadir-recreate-mysql-0 ProvisioningSucceeded Successfully provisioned volume pvc-3414c18a-9086-4e36-996e-6b235f75b566 pd.csi.storage.gke.io_gke-4fda7b6352e34818b132-132d-2d80-vm_35932e2e-0307-41e9-9804-89920a9ce6ae logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:41 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 222ms (222ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:41 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:41 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Started Started container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:41 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:41 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 221ms (221ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:41 +0000 UTC Normal Pod 
recreate-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:41 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:42 +0000 UTC Normal Pod recreate-mysql-0 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-mysql-0 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-qcz1 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:49 +0000 UTC Normal Pod recreate-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-3414c18a-9086-4e36-996e-6b235f75b566" attachdetach-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:51 +0000 UTC Normal Pod recreate-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:51 +0000 UTC Normal Pod recreate-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 268ms (268ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:51 +0000 UTC Normal Pod recreate-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:51 +0000 UTC Normal Pod recreate-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:53 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:53 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 229ms (229ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:53 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:53 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:53 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:53 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 240ms (240ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:53 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:54 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:54 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:54 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 268ms (268ms 
including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:54 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:39:54 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:14 +0000 UTC Normal Pod recreate-orc-1 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-orc-1 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-vdww default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:14 +0000 UTC Normal Pod recreate-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:14 +0000 UTC Normal Pod recreate-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 284ms (284ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:14 +0000 UTC Normal Pod recreate-orc-1.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:14 +0000 UTC Normal StatefulSet.apps recreate-orc SuccessfulCreate create Pod recreate-orc-1 in StatefulSet recreate-orc successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:15 +0000 UTC Normal Pod recreate-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:16 +0000 UTC Normal Pod recreate-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:16 +0000 UTC Normal Pod recreate-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 235ms (235ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:16 +0000 UTC Normal Pod recreate-orc-1.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:16 +0000 UTC Normal Pod recreate-orc-1.spec.containers{orc} Started Started container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:16 +0000 UTC Normal Pod recreate-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:17 +0000 UTC Normal Pod recreate-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 270ms (270ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:17 +0000 UTC Normal Pod recreate-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:17 +0000 UTC Normal Pod recreate-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:26 +0000 UTC Normal PersistentVolumeClaim datadir-recreate-mysql-1 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:26 +0000 UTC Normal PersistentVolumeClaim datadir-recreate-mysql-1 ExternalProvisioning 
Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:26 +0000 UTC Normal PersistentVolumeClaim datadir-recreate-mysql-1 Provisioning External provisioner is provisioning volume for claim "kuttl-test-ethical-krill/datadir-recreate-mysql-1" pd.csi.storage.gke.io_gke-4fda7b6352e34818b132-132d-2d80-vm_35932e2e-0307-41e9-9804-89920a9ce6ae logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:26 +0000 UTC Normal StatefulSet.apps recreate-mysql SuccessfulCreate create Claim datadir-recreate-mysql-1 Pod recreate-mysql-1 in StatefulSet recreate-mysql success statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:26 +0000 UTC Normal StatefulSet.apps recreate-mysql SuccessfulCreate create Pod recreate-mysql-1 in StatefulSet recreate-mysql successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:29 +0000 UTC Normal PersistentVolumeClaim datadir-recreate-mysql-1 ProvisioningSucceeded Successfully provisioned volume pvc-7348f673-b4fa-4330-ab05-5d7c0b6b17c3 pd.csi.storage.gke.io_gke-4fda7b6352e34818b132-132d-2d80-vm_35932e2e-0307-41e9-9804-89920a9ce6ae logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:30 +0000 UTC Normal Pod recreate-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-haproxy-0 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-qcz1 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:30 +0000 UTC Normal StatefulSet.apps recreate-haproxy SuccessfulCreate create Pod recreate-haproxy-0 in StatefulSet recreate-haproxy successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:30 +0000 UTC Normal Pod recreate-mysql-1 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-mysql-1 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-0647 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:31 +0000 UTC Normal Pod recreate-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:31 +0000 UTC Normal Pod recreate-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 277ms (277ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:31 +0000 UTC Normal Pod recreate-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:31 +0000 UTC Normal Pod recreate-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:33 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:33 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 228ms (228ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:33 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet 
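For reference, the read-and-record pattern from step 10-write-data above boils down to the two commands below. This is a minimal sketch assembled from this run's trace; the namespace, host, credentials, and ConfigMap name are simply the values this test used, and a later assert (not shown in this excerpt) compares the recorded ConfigMap data against the expected rows.

    # Read the table through the mysql-client pod and keep the raw rows.
    NAMESPACE=kuttl-test-ethical-krill
    HOST=recreate-mysql-2.recreate-mysql
    data=$(kubectl -n "$NAMESPACE" exec mysql-client -- bash -c "printf '%s\n' 'SELECT * FROM myDB.myTable' | mysql -sN -h $HOST -uroot -proot_password")
    # Record what was read so the follow-up step can assert on it.
    kubectl -n "$NAMESPACE" create configmap 11-write-data-2 --from-literal="data=${data}"
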
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:33 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:33 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:34 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 219ms (219ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:34 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:34 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:34 +0000 UTC Normal Pod recreate-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-haproxy-1 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-0647 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:34 +0000 UTC Normal StatefulSet.apps recreate-haproxy SuccessfulCreate create Pod recreate-haproxy-1 in StatefulSet recreate-haproxy successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:35 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:35 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 265ms (265ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:35 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:35 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:37 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:37 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 247ms (247ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:37 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:37 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:37 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:37 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 242ms (242ms including waiting) kubelet logger.go:42: 
17:55:15 | recreate | 2025-03-11 17:40:37 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:37 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:37 +0000 UTC Normal Pod recreate-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-7348f673-b4fa-4330-ab05-5d7c0b6b17c3" attachdetach-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:38 +0000 UTC Normal Pod recreate-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-haproxy-2 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-vdww default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:38 +0000 UTC Normal Pod recreate-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:38 +0000 UTC Normal Pod recreate-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 266ms (266ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:38 +0000 UTC Normal Pod recreate-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:38 +0000 UTC Normal StatefulSet.apps recreate-haproxy SuccessfulCreate create Pod recreate-haproxy-2 in StatefulSet recreate-haproxy successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:39 +0000 UTC Normal Pod recreate-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:39 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:39 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 262ms (262ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:39 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:39 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:40 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:40 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 215ms (215ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:40 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:40 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:40 +0000 UTC Normal Pod 
recreate-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:41 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 246ms (246ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:41 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:41 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:41 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:41 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 238ms (238ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:41 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:41 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:41 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:41 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 223ms (223ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:41 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:41 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:41 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:42 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 294ms (294ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:42 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:42 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:45 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:45 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:45 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} 
Killing Stopping container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:46 +0000 UTC Warning Pod recreate-haproxy-1 Scheduling FailedScheduling 0/3 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/not-ready: }, 2 node(s) didn't match pod anti-affinity rules. preemption: 0/3 nodes are available: 1 Preemption is not helpful for scheduling, 2 No preemption victims found for incoming pod. default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:46 +0000 UTC Normal StatefulSet.apps recreate-haproxy SuccessfulDelete delete Pod recreate-haproxy-1 in StatefulSet recreate-haproxy successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:46 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:46 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:46 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:46 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:47 +0000 UTC Normal Pod recreate-haproxy-1 NotTriggerScaleUp pod didn't trigger scale-up: cluster-autoscaler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:48 +0000 UTC Warning Pod recreate-haproxy-1 Scheduling FailedScheduling 0/3 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/not-ready: }, 2 node(s) didn't match pod anti-affinity rules. preemption: 0/3 nodes are available: 1 Preemption is not helpful for scheduling, 2 No preemption victims found for incoming pod. 
default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:49 +0000 UTC Normal Pod recreate-orc-2 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-orc-2 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-qcz1 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:49 +0000 UTC Normal StatefulSet.apps recreate-orc SuccessfulCreate create Pod recreate-orc-2 in StatefulSet recreate-orc successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:50 +0000 UTC Normal Pod recreate-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:50 +0000 UTC Normal Pod recreate-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 291ms (291ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:50 +0000 UTC Normal Pod recreate-orc-2.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:50 +0000 UTC Normal Pod recreate-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:52 +0000 UTC Normal Pod recreate-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:52 +0000 UTC Normal Pod recreate-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 247ms (247ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:52 +0000 UTC Normal Pod recreate-orc-2.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:52 +0000 UTC Normal Pod recreate-orc-2.spec.containers{orc} Started Started container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:52 +0000 UTC Normal Pod recreate-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:53 +0000 UTC Normal Pod recreate-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 178ms (178ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:53 +0000 UTC Normal Pod recreate-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:40:53 +0000 UTC Normal Pod recreate-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:41:02 +0000 UTC Normal Pod recreate-mysql-1 TaintManagerEviction Cancelling deletion of Pod kuttl-test-ethical-krill/recreate-mysql-1 taint-eviction-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:41:02 +0000 UTC Normal Pod recreate-orc-0 TaintManagerEviction Cancelling deletion of Pod kuttl-test-ethical-krill/recreate-orc-0 taint-eviction-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:41:02 +0000 UTC Warning Pod recreate-orc-0 Scheduling FailedScheduling 0/3 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/not-ready: }, 2 node(s) didn't match pod anti-affinity rules. 
preemption: 0/3 nodes are available: 1 Preemption is not helpful for scheduling, 2 No preemption victims found for incoming pod. default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:41:02 +0000 UTC Normal StatefulSet.apps recreate-orc SuccessfulDelete delete Pod recreate-orc-0 in StatefulSet recreate-orc successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:41:03 +0000 UTC Normal Pod recreate-mysql-1 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-mysql-1 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-vdww default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:41:03 +0000 UTC Normal StatefulSet.apps recreate-mysql SuccessfulDelete delete Pod recreate-mysql-1 in StatefulSet recreate-mysql successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:41:03 +0000 UTC Normal Pod recreate-orc-0 NotTriggerScaleUp pod didn't trigger scale-up: cluster-autoscaler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:41:03 +0000 UTC Warning Pod recreate-orc-0 Scheduling FailedScheduling 0/3 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/not-ready: }, 2 node(s) didn't match pod anti-affinity rules. preemption: 0/3 nodes are available: 1 Preemption is not helpful for scheduling, 2 No preemption victims found for incoming pod. default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:41:04 +0000 UTC Warning Pod recreate-mysql-1 FailedAttachVolume Multi-Attach error for volume "pvc-7348f673-b4fa-4330-ab05-5d7c0b6b17c3" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:41:06 +0000 UTC Warning Pod recreate-orc-0 Scheduling FailedScheduling 0/3 nodes are available: 1 node(s) had untolerated taint {node.kubernetes.io/not-ready: }, 2 node(s) didn't match pod anti-affinity rules. preemption: 0/3 nodes are available: 1 Preemption is not helpful for scheduling, 2 No preemption victims found for incoming pod. 
default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:34 +0000 UTC Normal Pod recreate-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-haproxy-1 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-0647 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:34 +0000 UTC Normal Pod recreate-orc-0 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-orc-0 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-0647 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:35 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:35 +0000 UTC Normal Pod recreate-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-7348f673-b4fa-4330-ab05-5d7c0b6b17c3" attachdetach-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:35 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:37 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:37 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 270ms (270ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:37 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:37 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:38 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:39 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 223ms (223ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:39 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:39 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:39 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:39 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 238ms (238ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:39 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:39 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet 
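The warnings above (FailedScheduling on the not-ready taint plus anti-affinity, and the Multi-Attach error on the datadir volume) clear on their own once the node recovers and the volume detaches, as the later Binding and SuccessfulAttachVolume events show. The commands below are generic kubectl inspection steps, not part of the test itself; the namespace, pod, and node names are taken from this run.

    # Surface the scheduling/attach warnings in one place.
    kubectl -n kuttl-test-ethical-krill get events --sort-by=.lastTimestamp | grep -E 'FailedScheduling|FailedAttachVolume'
    # Check which node still carries the not-ready taint, then inspect the blocked pod.
    kubectl describe node gke-jen-ps-825-808887c6--default-pool-9007dcbb-0647 | grep -i taint
    kubectl -n kuttl-test-ethical-krill describe pod recreate-mysql-1
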
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:39 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:39 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 238ms (238ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:39 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:40 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:43 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 8.755s (8.755s including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:43 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:43 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:43 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 8.498s (8.746s including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:43 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:43 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:50 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:50 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:55 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 4.327s (4.327s including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:55 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:55 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Started Started container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:55 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:55 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 226ms (226ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:55 +0000 UTC Normal Pod 
recreate-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:55 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:56 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 5.7s (5.7s including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:56 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:56 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:56 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:57 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 248ms (248ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:57 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:57 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:58 +0000 UTC Warning Pod recreate-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/03/11 17:42:56 Peers: [3732373134613533.recreate-mysql-unready.kuttl-test-ethical-krill 6634333539343630.recreate-mysql-unready.kuttl-test-ethical-krill] 2025/03/11 17:42:56 FQDN: recreate-mysql-1.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:42:56 Primary: recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill Replicas: [recreate-mysql-1.recreate-mysql.kuttl-test-ethical-krill] 2025/03/11 17:42:56 lookup recreate-mysql-1 [10.93.66.30] 2025/03/11 17:42:56 PodIP: 10.93.66.30 2025/03/11 17:42:56 lookup recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill [10.93.65.36] 2025/03/11 17:42:56 PrimaryIP: 10.93.65.36 2025/03/11 17:42:57 Donor: recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:42:57 Opening connection to 10.93.66.30 2025/03/11 17:42:57 Clone required: true 2025/03/11 17:42:57 Checking if a clone in progress 2025/03/11 17:42:57 Clone in progress: false 2025/03/11 17:42:57 Cloning from recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:42:58 Clone finished. Restarting container... 
kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:42:58 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:43:01 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 211ms (211ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:43:02 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 222ms (222ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:43:37 +0000 UTC Normal PersistentVolumeClaim datadir-recreate-mysql-2 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:43:37 +0000 UTC Normal PersistentVolumeClaim datadir-recreate-mysql-2 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:43:37 +0000 UTC Normal PersistentVolumeClaim datadir-recreate-mysql-2 Provisioning External provisioner is provisioning volume for claim "kuttl-test-ethical-krill/datadir-recreate-mysql-2" pd.csi.storage.gke.io_gke-4fda7b6352e34818b132-132d-2d80-vm_35932e2e-0307-41e9-9804-89920a9ce6ae logger.go:42: 17:55:15 | recreate | 2025-03-11 17:43:37 +0000 UTC Normal StatefulSet.apps recreate-mysql SuccessfulCreate create Claim datadir-recreate-mysql-2 Pod recreate-mysql-2 in StatefulSet recreate-mysql success statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:43:37 +0000 UTC Normal StatefulSet.apps recreate-mysql SuccessfulCreate create Pod recreate-mysql-2 in StatefulSet recreate-mysql successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:43:41 +0000 UTC Normal PersistentVolumeClaim datadir-recreate-mysql-2 ProvisioningSucceeded Successfully provisioned volume pvc-99573725-61be-495c-8230-0c642a4aecbf pd.csi.storage.gke.io_gke-4fda7b6352e34818b132-132d-2d80-vm_35932e2e-0307-41e9-9804-89920a9ce6ae logger.go:42: 17:55:15 | recreate | 2025-03-11 17:43:41 +0000 UTC Normal Pod recreate-mysql-2 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-mysql-2 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-0647 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:43:49 +0000 UTC Normal Pod recreate-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-99573725-61be-495c-8230-0c642a4aecbf" attachdetach-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:43:50 +0000 UTC Normal Pod recreate-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:43:50 +0000 UTC Normal Pod recreate-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 235ms (235ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:43:50 +0000 UTC Normal Pod recreate-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init 
kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:43:50 +0000 UTC Normal Pod recreate-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:43:53 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:44:17 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 24.117s (24.117s including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:44:17 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:44:17 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:44:17 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:44:32 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 15.3s (15.3s including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:44:32 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:44:32 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:44:32 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:44:37 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 4.352s (4.352s including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:44:37 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:44:37 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:44:40 +0000 UTC Warning Pod recreate-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/03/11 17:44:40 Peers: [3534313065323537.recreate-mysql-unready.kuttl-test-ethical-krill 3732373134613533.recreate-mysql-unready.kuttl-test-ethical-krill 6634333539343630.recreate-mysql-unready.kuttl-test-ethical-krill] 2025/03/11 17:44:40 FQDN: recreate-mysql-2.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:44:40 bootstrap finished in 0.005955 seconds 2025/03/11 17:44:40 bootstrap failed: select donor: connect to 3534313065323537.recreate-mysql-unready.kuttl-test-ethical-krill: ping DB: dial tcp 10.93.64.5:33062: connect: connection refused kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:44:41 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:44:51 +0000 UTC Normal Pod 
recreate-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 193ms (193ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:45:11 +0000 UTC Warning Pod recreate-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/03/11 17:45:10 Peers: [3534313065323537.recreate-mysql-unready.kuttl-test-ethical-krill 3732373134613533.recreate-mysql-unready.kuttl-test-ethical-krill 6634333539343630.recreate-mysql-unready.kuttl-test-ethical-krill] 2025/03/11 17:45:10 FQDN: recreate-mysql-2.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:45:10 Primary: recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill Replicas: [recreate-mysql-1.recreate-mysql.kuttl-test-ethical-krill recreate-mysql-2.recreate-mysql.kuttl-test-ethical-krill] 2025/03/11 17:45:10 lookup recreate-mysql-2 [10.93.64.5] 2025/03/11 17:45:10 PodIP: 10.93.64.5 2025/03/11 17:45:10 lookup recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill [10.93.65.36] 2025/03/11 17:45:10 PrimaryIP: 10.93.65.36 2025/03/11 17:45:10 Donor: recreate-mysql-1.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:45:10 Opening connection to 10.93.64.5 2025/03/11 17:45:10 Clone required: true 2025/03/11 17:45:10 Checking if a clone in progress 2025/03/11 17:45:10 Clone in progress: false 2025/03/11 17:45:10 Cloning from recreate-mysql-1.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:45:11 Clone finished. Restarting container... kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:12 +0000 UTC Normal StatefulSet.apps recreate-mysql SuccessfulDelete delete Pod recreate-mysql-2 in StatefulSet recreate-mysql successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:12 +0000 UTC Normal Pod recreate-orc-2.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:12 +0000 UTC Normal Pod recreate-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:12 +0000 UTC Normal StatefulSet.apps recreate-orc SuccessfulDelete delete Pod recreate-orc-2 in StatefulSet recreate-orc successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:13 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:13 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:13 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:13 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:13 +0000 UTC Normal StatefulSet.apps recreate-haproxy SuccessfulDelete delete Pod recreate-haproxy-2 in StatefulSet recreate-haproxy successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:21 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:21 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:21 +0000 UTC Normal Pod 
recreate-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:21 +0000 UTC Normal StatefulSet.apps recreate-mysql SuccessfulDelete delete Pod recreate-mysql-0 in StatefulSet recreate-mysql successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:25 +0000 UTC Warning Pod recreate-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:43 +0000 UTC Normal Pod recreate-orc-1.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:43 +0000 UTC Normal Pod recreate-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:43 +0000 UTC Normal StatefulSet.apps recreate-orc SuccessfulDelete delete Pod recreate-orc-1 in StatefulSet recreate-orc successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:44 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:44 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:46:44 +0000 UTC Normal StatefulSet.apps recreate-haproxy SuccessfulDelete delete Pod recreate-haproxy-0 in StatefulSet recreate-haproxy successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:14 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:15 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:49 +0000 UTC Normal Pod recreate-mysql-0 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-mysql-0 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-0647 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:49 +0000 UTC Normal Pod recreate-orc-0 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-orc-0 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-0647 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:50 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:51 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 269ms (269ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:51 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:51 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:52 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:53 +0000 UTC Normal Pod 
recreate-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 227ms (227ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:53 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:53 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Started Started container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:53 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:53 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 252ms (252ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:53 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:53 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:56 +0000 UTC Normal Pod recreate-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-3414c18a-9086-4e36-996e-6b235f75b566" attachdetach-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:58 +0000 UTC Normal Pod recreate-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:58 +0000 UTC Normal Pod recreate-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 279ms (279ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:58 +0000 UTC Normal Pod recreate-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:58 +0000 UTC Normal Pod recreate-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:47:59 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:00 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 216ms (216ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:00 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:00 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:00 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:00 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 206ms (206ms including waiting) kubelet 
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:00 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:00 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:00 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:00 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 224ms (224ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:00 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:00 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:26 +0000 UTC Normal Pod recreate-orc-1 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-orc-1 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-vdww default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:26 +0000 UTC Normal Pod recreate-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:26 +0000 UTC Normal Pod recreate-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 297ms (297ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:26 +0000 UTC Normal Pod recreate-orc-1.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:26 +0000 UTC Normal Pod recreate-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:29 +0000 UTC Normal Pod recreate-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:29 +0000 UTC Normal Pod recreate-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 233ms (233ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:29 +0000 UTC Normal Pod recreate-orc-1.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:29 +0000 UTC Normal Pod recreate-orc-1.spec.containers{orc} Started Started container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:29 +0000 UTC Normal Pod recreate-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:29 +0000 UTC Normal Pod recreate-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 229ms (229ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:29 +0000 UTC Normal Pod recreate-orc-1.spec.containers{mysql-monit} Created Created 
container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:29 +0000 UTC Normal Pod recreate-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:33 +0000 UTC Normal Pod recreate-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-haproxy-0 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-0647 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:33 +0000 UTC Normal Pod recreate-mysql-1 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-mysql-1 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-qcz1 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:34 +0000 UTC Normal Pod recreate-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:34 +0000 UTC Normal Pod recreate-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 266ms (266ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:34 +0000 UTC Normal Pod recreate-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:34 +0000 UTC Normal Pod recreate-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:36 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:37 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 227ms (227ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:37 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:37 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:37 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:37 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 254ms (254ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:37 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:37 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:38 +0000 UTC Normal Pod recreate-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-haproxy-1 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-vdww default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:38 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image 
"perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:38 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 305ms (305ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:38 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:38 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:40 +0000 UTC Normal Pod recreate-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-7348f673-b4fa-4330-ab05-5d7c0b6b17c3" attachdetach-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:41 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:41 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 234ms (234ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:41 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:41 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:41 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:41 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 216ms (216ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:41 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:41 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:42 +0000 UTC Normal Pod recreate-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-haproxy-2 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-qcz1 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:42 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:42 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 257ms (258ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:42 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:42 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Started Started 
container mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:43 +0000 UTC Warning Pod recreate-haproxy-2 FailedMount MountVolume.SetUp failed for volume "config" : failed to sync configmap cache: timed out waiting for the condition kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:43 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:44 +0000 UTC Normal Pod recreate-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:44 +0000 UTC Normal Pod recreate-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 307ms (307ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:44 +0000 UTC Normal Pod recreate-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:44 +0000 UTC Normal Pod recreate-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:44 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 227ms (227ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:44 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:44 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:44 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:44 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 249ms (249ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:44 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:44 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:44 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:44 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 242ms (242ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:44 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:44 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:46 +0000 UTC Normal Pod 
recreate-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:47 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 240ms (240ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:47 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:47 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:47 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:47 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 240ms (240ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:47 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:48:47 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:01 +0000 UTC Normal Pod recreate-orc-2 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-orc-2 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-qcz1 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:01 +0000 UTC Normal Pod recreate-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:02 +0000 UTC Normal Pod recreate-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 276ms (276ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:02 +0000 UTC Normal Pod recreate-orc-2.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:02 +0000 UTC Normal Pod recreate-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:04 +0000 UTC Warning Pod recreate-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/03/11 17:49:01 Peers: [6132613566393735.recreate-mysql-unready.kuttl-test-ethical-krill 6462633466366532.recreate-mysql-unready.kuttl-test-ethical-krill] 2025/03/11 17:49:01 FQDN: recreate-mysql-1.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:49:01 Primary: recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill Replicas: [recreate-mysql-1.recreate-mysql.kuttl-test-ethical-krill] 2025/03/11 17:49:01 lookup recreate-mysql-1 [10.93.65.41] 2025/03/11 17:49:01 PodIP: 10.93.65.41 2025/03/11 17:49:01 lookup recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill [10.93.64.7] 2025/03/11 17:49:01 PrimaryIP: 10.93.64.7 2025/03/11 17:49:02 Donor: recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:49:02 Opening connection to 10.93.65.41 2025/03/11 17:49:02 Clone required: true 2025/03/11 17:49:02 
Checking if a clone in progress 2025/03/11 17:49:02 Clone in progress: false 2025/03/11 17:49:02 Cloning from recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:49:04 Clone finished. Restarting container... kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:04 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:04 +0000 UTC Normal Pod recreate-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:05 +0000 UTC Normal Pod recreate-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 245ms (245ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:05 +0000 UTC Normal Pod recreate-orc-2.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:05 +0000 UTC Normal Pod recreate-orc-2.spec.containers{orc} Started Started container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:05 +0000 UTC Normal Pod recreate-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:05 +0000 UTC Normal Pod recreate-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 253ms (253ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:05 +0000 UTC Normal Pod recreate-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:05 +0000 UTC Normal Pod recreate-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:07 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 221ms (221ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:42 +0000 UTC Normal Pod recreate-mysql-2 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-mysql-2 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-vdww default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:50 +0000 UTC Normal Pod recreate-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-99573725-61be-495c-8230-0c642a4aecbf" attachdetach-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:52 +0000 UTC Normal Pod recreate-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:52 +0000 UTC Normal Pod recreate-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 273ms (273ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:52 +0000 UTC Normal Pod recreate-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:52 +0000 UTC Normal Pod recreate-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 17:55:15 | 
recreate | 2025-03-11 17:49:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 223ms (223ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 240ms (240ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:55 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:55 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:55 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 234ms (234ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:55 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:49:55 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:50:13 +0000 UTC Warning Pod recreate-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/03/11 17:50:12 Peers: [6132613566393735.recreate-mysql-unready.kuttl-test-ethical-krill 6332636663336338.recreate-mysql-unready.kuttl-test-ethical-krill 6462633466366532.recreate-mysql-unready.kuttl-test-ethical-krill] 2025/03/11 17:50:12 FQDN: recreate-mysql-2.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:50:12 Primary: recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill Replicas: [recreate-mysql-1.recreate-mysql.kuttl-test-ethical-krill recreate-mysql-2.recreate-mysql.kuttl-test-ethical-krill] 2025/03/11 17:50:12 lookup recreate-mysql-2 [10.93.66.33] 2025/03/11 17:50:12 PodIP: 10.93.66.33 2025/03/11 17:50:12 lookup recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill [10.93.64.7] 2025/03/11 17:50:12 PrimaryIP: 10.93.64.7 2025/03/11 17:50:12 Donor: recreate-mysql-1.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:50:12 Opening connection to 10.93.66.33 2025/03/11 17:50:12 Clone required: true 2025/03/11 17:50:12 Checking if a clone in progress 2025/03/11 17:50:12 Clone in progress: false 2025/03/11 17:50:12 Cloning 
from recreate-mysql-1.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:50:13 Clone finished. Restarting container... kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:50:13 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:50:16 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 255ms (255ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:04 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:04 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:04 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:08 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:08 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:09 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:20 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:20 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:20 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:20 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:21 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:21 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:21 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:21 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:21 +0000 UTC Normal Pod recreate-orc-1.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:21 +0000 UTC Normal Pod recreate-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:21 +0000 UTC Normal Pod recreate-orc-2.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:22 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 17:55:15 | 
recreate | 2025-03-11 17:51:22 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:22 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:22 +0000 UTC Normal Pod recreate-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:23 +0000 UTC Warning Pod recreate-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/03/11 17:51:23 readiness check failed: connect to db: ping DB: dial tcp 10.93.64.7:33062: connect: connection refused kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:27 +0000 UTC Warning Pod recreate-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:47 +0000 UTC Normal Pod recreate-mysql-0 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-mysql-0 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-0647 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:51:47 +0000 UTC Normal StatefulSet.apps recreate-mysql SuccessfulCreate create Pod recreate-mysql-0 in StatefulSet recreate-mysql successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:02 +0000 UTC Normal Pod recreate-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-3414c18a-9086-4e36-996e-6b235f75b566" attachdetach-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:03 +0000 UTC Normal Pod recreate-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:03 +0000 UTC Normal Pod recreate-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 288ms (288ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:03 +0000 UTC Normal Pod recreate-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:03 +0000 UTC Normal Pod recreate-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:05 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:05 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 236ms (236ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:05 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:05 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:05 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 17:55:15 | recreate | 
2025-03-11 17:52:06 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 254ms (254ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:06 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:06 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:06 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:06 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 236ms (236ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:06 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:06 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:08 +0000 UTC Normal Pod recreate-orc-0 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-orc-0 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-0647 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:08 +0000 UTC Normal StatefulSet.apps recreate-orc SuccessfulCreate create Pod recreate-orc-0 in StatefulSet recreate-orc successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:09 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:09 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 257ms (257ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:09 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:09 +0000 UTC Normal Pod recreate-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:11 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:11 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 229ms (229ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:11 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:11 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Started Started container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:11 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Pulling Pulling image 
"perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:12 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 231ms (231ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:12 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:12 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:38 +0000 UTC Normal Pod recreate-mysql-1 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-mysql-1 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-vdww default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:38 +0000 UTC Normal StatefulSet.apps recreate-mysql SuccessfulCreate create Pod recreate-mysql-1 in StatefulSet recreate-mysql successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:43 +0000 UTC Normal Pod recreate-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-haproxy-0 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-0647 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:43 +0000 UTC Normal StatefulSet.apps recreate-haproxy SuccessfulCreate create Pod recreate-haproxy-0 in StatefulSet recreate-haproxy successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:44 +0000 UTC Normal Pod recreate-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:44 +0000 UTC Normal Pod recreate-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 296ms (296ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:44 +0000 UTC Normal Pod recreate-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:44 +0000 UTC Normal Pod recreate-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:44 +0000 UTC Normal Pod recreate-orc-1 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-orc-1 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-qcz1 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:44 +0000 UTC Normal Pod recreate-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:44 +0000 UTC Normal StatefulSet.apps recreate-orc SuccessfulCreate create Pod recreate-orc-1 in StatefulSet recreate-orc successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:45 +0000 UTC Normal Pod recreate-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 263ms (263ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:45 +0000 UTC Normal Pod recreate-orc-1.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 17:55:15 | 
recreate | 2025-03-11 17:52:45 +0000 UTC Normal Pod recreate-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:46 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:46 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 245ms (245ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:46 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:46 +0000 UTC Normal Pod recreate-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-7348f673-b4fa-4330-ab05-5d7c0b6b17c3" attachdetach-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:46 +0000 UTC Normal Pod recreate-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:46 +0000 UTC Normal Pod recreate-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 239ms (239ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:46 +0000 UTC Normal Pod recreate-orc-1.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:46 +0000 UTC Normal Pod recreate-orc-1.spec.containers{orc} Started Started container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:46 +0000 UTC Normal Pod recreate-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:47 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:47 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:47 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 220ms (220ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:47 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:47 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:47 +0000 UTC Normal Pod recreate-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-haproxy-1 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-qcz1 default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:47 +0000 UTC Normal StatefulSet.apps recreate-haproxy SuccessfulCreate create Pod recreate-haproxy-1 in StatefulSet recreate-haproxy successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:47 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image 
"perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:47 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 268ms (268ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:47 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:47 +0000 UTC Normal Pod recreate-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:47 +0000 UTC Normal Pod recreate-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 232ms (232ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:47 +0000 UTC Normal Pod recreate-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:47 +0000 UTC Normal Pod recreate-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:48 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:48 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 260ms (260ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:48 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:48 +0000 UTC Normal Pod recreate-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 257ms (257ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 224ms (224ms including waiting) kubelet 
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 252ms (252ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 226ms (226ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:50 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:51 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 237ms (237ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:51 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:51 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:51 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:52 +0000 UTC Normal Pod recreate-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-haproxy-2 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-vdww default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:52 +0000 UTC Normal Pod recreate-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:52 +0000 UTC Normal StatefulSet.apps recreate-haproxy SuccessfulCreate create Pod recreate-haproxy-2 in StatefulSet recreate-haproxy successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:53 +0000 UTC Normal Pod recreate-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 269ms (269ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:53 +0000 UTC Normal Pod 
recreate-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:53 +0000 UTC Normal Pod recreate-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:55 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:55 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 244ms (244ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:55 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:55 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:55 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:55 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 247ms (247ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:55 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:52:55 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:08 +0000 UTC Warning Pod recreate-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/03/11 17:53:07 Peers: [3365333035663366.recreate-mysql-unready.kuttl-test-ethical-krill 6438666463623733.recreate-mysql-unready.kuttl-test-ethical-krill] 2025/03/11 17:53:07 FQDN: recreate-mysql-1.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:53:07 Primary: recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill Replicas: [recreate-mysql-1.recreate-mysql.kuttl-test-ethical-krill] 2025/03/11 17:53:07 lookup recreate-mysql-1 [10.93.66.34] 2025/03/11 17:53:07 PodIP: 10.93.66.34 2025/03/11 17:53:07 lookup recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill [10.93.64.9] 2025/03/11 17:53:07 PrimaryIP: 10.93.64.9 2025/03/11 17:53:07 Donor: recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:53:07 Opening connection to 10.93.66.34 2025/03/11 17:53:07 Clone required: true 2025/03/11 17:53:07 Checking if a clone in progress 2025/03/11 17:53:07 Clone in progress: false 2025/03/11 17:53:07 Cloning from recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:53:08 Clone finished. Restarting container... 
kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:08 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:12 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 230ms (230ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:19 +0000 UTC Normal Pod recreate-orc-2 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-orc-2 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-vdww default-scheduler logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:19 +0000 UTC Normal StatefulSet.apps recreate-orc SuccessfulCreate create Pod recreate-orc-2 in StatefulSet recreate-orc successful statefulset-controller logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:20 +0000 UTC Normal Pod recreate-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:20 +0000 UTC Normal Pod recreate-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 276ms (276ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:20 +0000 UTC Normal Pod recreate-orc-2.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:20 +0000 UTC Normal Pod recreate-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:22 +0000 UTC Normal Pod recreate-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:22 +0000 UTC Normal Pod recreate-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 260ms (260ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:22 +0000 UTC Normal Pod recreate-orc-2.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:22 +0000 UTC Normal Pod recreate-orc-2.spec.containers{orc} Started Started container orc kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:22 +0000 UTC Normal Pod recreate-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:22 +0000 UTC Normal Pod recreate-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 191ms (191ms including waiting) kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:22 +0000 UTC Normal Pod recreate-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:22 +0000 UTC Normal Pod recreate-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:42 +0000 UTC Normal Pod recreate-mysql-2 Binding Scheduled Successfully assigned kuttl-test-ethical-krill/recreate-mysql-2 to gke-jen-ps-825-808887c6--default-pool-9007dcbb-qcz1 default-scheduler logger.go:42: 17:55:15 | recreate | 
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:42 +0000 UTC Normal StatefulSet.apps recreate-mysql SuccessfulCreate create Pod recreate-mysql-2 in StatefulSet recreate-mysql successful statefulset-controller
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:50 +0000 UTC Normal Pod recreate-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-99573725-61be-495c-8230-0c642a4aecbf" attachdetach-controller
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:51 +0000 UTC Normal Pod recreate-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-825-808887c6" kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:51 +0000 UTC Normal Pod recreate-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-825-808887c6" in 314ms (314ms including waiting) kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:51 +0000 UTC Normal Pod recreate-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:51 +0000 UTC Normal Pod recreate-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:53 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:53 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 251ms (251ms including waiting) kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 311ms (311ms including waiting) kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 287ms (287ms including waiting) kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:54 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:53:55 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
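The events above show recreate-mysql-2 coming up with its mysql-init init container plus the mysql, xtrabackup, and pt-heartbeat containers. A quick way to confirm that layout directly against the running pod, using names taken from the events (adjust the namespace if reproducing this elsewhere):

    # Print init container names on one line and regular container names on the next.
    kubectl -n kuttl-test-ethical-krill get pod recreate-mysql-2 \
        -o jsonpath='{.spec.initContainers[*].name}{"\n"}{.spec.containers[*].name}{"\n"}'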
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:54:12 +0000 UTC Warning Pod recreate-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/03/11 17:54:11 Peers: [3365333035663366.recreate-mysql-unready.kuttl-test-ethical-krill 6331353132346135.recreate-mysql-unready.kuttl-test-ethical-krill 6438666463623733.recreate-mysql-unready.kuttl-test-ethical-krill] 2025/03/11 17:54:11 FQDN: recreate-mysql-2.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:54:11 Primary: recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill Replicas: [recreate-mysql-1.recreate-mysql.kuttl-test-ethical-krill recreate-mysql-2.recreate-mysql.kuttl-test-ethical-krill] 2025/03/11 17:54:11 lookup recreate-mysql-2 [10.93.65.46] 2025/03/11 17:54:11 PodIP: 10.93.65.46 2025/03/11 17:54:11 lookup recreate-mysql-0.recreate-mysql.kuttl-test-ethical-krill [10.93.64.9] 2025/03/11 17:54:11 PrimaryIP: 10.93.64.9 2025/03/11 17:54:11 Donor: recreate-mysql-1.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:54:11 Opening connection to 10.93.65.46 2025/03/11 17:54:11 Clone required: true 2025/03/11 17:54:11 Checking if a clone in progress 2025/03/11 17:54:11 Clone in progress: false 2025/03/11 17:54:11 Cloning from recreate-mysql-1.recreate-mysql.kuttl-test-ethical-krill 2025/03/11 17:54:12 Clone finished. Restarting container... kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:54:12 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:54:16 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 244ms (244ms including waiting) kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet
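The burst of Killing/Stopping events at 17:55:08 is the test tearing the cluster down before the namespace is deleted. A rough sketch for following such a teardown live, assuming the cluster's API server supports field selectors on event reasons:

    # Watch only container-kill events in the test namespace (support for this field
    # selector can vary by Kubernetes version; dropping --field-selector shows everything).
    kubectl -n kuttl-test-ethical-krill get events \
        --field-selector reason=Killing --watch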
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-orc-0.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-orc-1.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-orc-2.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:08 +0000 UTC Normal Pod recreate-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:09 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:09 +0000 UTC Normal Pod recreate-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 17:55:15 | recreate | 2025-03-11 17:55:13 +0000 UTC Warning Pod recreate-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/03/11 17:55:13 readiness check failed: connect to db: ping DB: dial tcp 10.93.64.9:33062: connect: connection refused kubelet
logger.go:42: 17:55:15 | recreate | Deleting namespace: kuttl-test-ethical-krill
=== NAME kuttl
harness.go:407: run tests finished
harness.go:515: cleaning up
harness.go:572: removing temp folder: ""
--- PASS: kuttl (1000.12s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/recreate (999.68s)
PASS
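To reproduce just this test case against an existing cluster, a kuttl invocation along these lines should work; the test-suite path and config location are assumptions about the repository layout rather than values taken from this run:

    # Sketch: run only the "recreate" test from the suite with the kuttl kubectl plugin.
    kubectl kuttl test e2e-tests/tests \
        --config e2e-tests/kuttl.yaml \
        --test recreate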