=== RUN   kuttl
    harness.go:464: starting setup
    harness.go:255: running tests using configured kubeconfig.
    harness.go:278: Successful connection to cluster at: https://34.68.224.18
    harness.go:363: running tests
    harness.go:75: going to run test suite with timeout of 180 seconds for each step
    harness.go:375: testsuite: e2e-tests/tests has 34 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/haproxy
=== PAUSE kuttl/harness/haproxy
=== CONT  kuttl/harness/haproxy
logger.go:42: 12:03:01 | haproxy | Creating namespace: kuttl-test-lucky-shepherd
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | starting test step 0-deploy-operator
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
init_temp_dir # do this only in the first TestStep
deploy_operator
deploy_non_tls_cluster_secrets
deploy_tls_cluster_secrets
deploy_client]
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | + source ../../functions
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ realpath ../../..
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-913
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | ++++ pwd
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-913/e2e-tests/tests/haproxy
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | ++ test_name=haproxy
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-913/e2e-tests/vars.sh
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-913
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-913
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-913/deploy
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-913/deploy
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-913/e2e-tests
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-913/e2e-tests
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-913/e2e-tests/conf
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-913/e2e-tests/conf
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export TEMP_DIR=/tmp/kuttl/ps/haproxy
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ TEMP_DIR=/tmp/kuttl/ps/haproxy
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | ++++ git rev-parse --abbrev-ref HEAD
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export GIT_BRANCH=PR-913
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ GIT_BRANCH=PR-913
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export VERSION=PR-913-7e558330
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ VERSION=PR-913-7e558330
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-913-7e558330
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-913-7e558330
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export PMM_SERVER_VERSION=1.4.3
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ PMM_SERVER_VERSION=1.4.3
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export CERT_MANAGER_VER=1.17.2
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ CERT_MANAGER_VER=1.17.2
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export MINIO_VER=5.4.0
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ MINIO_VER=5.4.0
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ export CHAOS_MESH_VER=2.7.2
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ CHAOS_MESH_VER=2.7.2
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | ++++ which gdate
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-913/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | ++++ which date
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ date=/usr/bin/date
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ oc get projects
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ :
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ kubectl get nodes
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | +++ grep '^minikube'
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | ++ oc get projects
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | + init_temp_dir
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | + rm -rf /tmp/kuttl/ps/haproxy
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | + mkdir -p /tmp/kuttl/ps/haproxy
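Everything above comes from sourcing e2e-tests/vars.sh. The file itself is not shown in the log, but a minimal sketch consistent with the traced values would look like the following; the defaulting style is an assumption read off the xtrace, not the actual file contents:

#!/bin/bash
# sketch of e2e-tests/vars.sh, reconstructed from the xtrace above

export ROOT_REPO=${ROOT_REPO:-$(realpath ../../..)}
export DEPLOY_DIR="${ROOT_REPO}/deploy"
export TESTS_DIR="${ROOT_REPO}/e2e-tests"
export TESTS_CONFIG_DIR="${TESTS_DIR}/conf"
export TEMP_DIR="/tmp/kuttl/ps/${test_name}"   # test_name is set by the caller (here: haproxy)

export GIT_BRANCH=${GIT_BRANCH:-$(git rev-parse --abbrev-ref HEAD)}
export VERSION=${VERSION:-"${GIT_BRANCH}-$(git rev-parse --short HEAD)"}   # e.g. PR-913-7e558330

# operator image under test, plus the component images it will roll out
export IMAGE=${IMAGE:-"perconalab/percona-server-mysql-operator:${VERSION}"}
export IMAGE_MYSQL=${IMAGE_MYSQL:-"perconalab/percona-server-mysql-operator:main-psmysql"}
export IMAGE_HAPROXY=${IMAGE_HAPROXY:-"perconalab/percona-server-mysql-operator:main-haproxy"}
export IMAGE_ORCHESTRATOR=${IMAGE_ORCHESTRATOR:-"perconalab/percona-server-mysql-operator:main-orchestrator"}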
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | + deploy_operator
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | + destroy_operator
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | + kubectl -n ps-operator delete deployment percona-server-mysql-operator --force --grace-period=0
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
logger.go:42: 12:03:02 | haproxy/0-deploy-operator | deployment.apps "percona-server-mysql-operator" force deleted
logger.go:42: 12:03:03 | haproxy/0-deploy-operator | + [[ -n ps-operator ]]
logger.go:42: 12:03:03 | haproxy/0-deploy-operator | + kubectl delete namespace ps-operator --force --grace-period=0
logger.go:42: 12:03:03 | haproxy/0-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
logger.go:42: 12:03:03 | haproxy/0-deploy-operator | namespace "ps-operator" force deleted
logger.go:42: 12:03:09 | haproxy/0-deploy-operator | + [[ -n ps-operator ]]
logger.go:42: 12:03:09 | haproxy/0-deploy-operator | + create_namespace ps-operator
logger.go:42: 12:03:09 | haproxy/0-deploy-operator | + local namespace=ps-operator
logger.go:42: 12:03:09 | haproxy/0-deploy-operator | + [[ -n '' ]]
logger.go:42: 12:03:09 | haproxy/0-deploy-operator | + kubectl delete namespace ps-operator --ignore-not-found
logger.go:42: 12:03:09 | haproxy/0-deploy-operator | + kubectl wait --for=delete namespace ps-operator
logger.go:42: 12:03:10 | haproxy/0-deploy-operator | + kubectl create namespace ps-operator
logger.go:42: 12:03:10 | haproxy/0-deploy-operator | namespace/ps-operator created
logger.go:42: 12:03:10 | haproxy/0-deploy-operator | + kubectl -n ps-operator apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-ps-operator_PR-913/deploy/crd.yaml
logger.go:42: 12:03:11 | haproxy/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlbackups.ps.percona.com serverside-applied
logger.go:42: 12:03:11 | haproxy/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlrestores.ps.percona.com serverside-applied
logger.go:42: 12:03:12 | haproxy/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqls.ps.percona.com serverside-applied
logger.go:42: 12:03:12 | haproxy/0-deploy-operator | + '[' -n ps-operator ']'
logger.go:42: 12:03:12 | haproxy/0-deploy-operator | + kubectl -n ps-operator apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-913/deploy/cw-rbac.yaml
logger.go:42: 12:03:13 | haproxy/0-deploy-operator | serviceaccount/percona-server-mysql-operator created
logger.go:42: 12:03:13 | haproxy/0-deploy-operator | role.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created
logger.go:42: 12:03:13 | haproxy/0-deploy-operator | clusterrole.rbac.authorization.k8s.io/percona-server-mysql-operator unchanged
logger.go:42: 12:03:14 | haproxy/0-deploy-operator | rolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created
logger.go:42: 12:03:14 | haproxy/0-deploy-operator | clusterrolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator unchanged
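The deploy_operator call traced above first tears down any leftover operator, then recreates the ps-operator namespace and applies the CRDs and cluster-wide RBAC. The helpers live in e2e-tests/functions; a sketch matching the traced behavior, where OPERATOR_NS=ps-operator is an assumption inferred from the [[ -n ps-operator ]] checks:

destroy_operator() {
    # force-delete is deliberate: a stuck deployment must not block the test run
    kubectl -n "${OPERATOR_NS}" delete deployment percona-server-mysql-operator --force --grace-period=0 || true
    if [[ -n ${OPERATOR_NS} ]]; then
        kubectl delete namespace "${OPERATOR_NS}" --force --grace-period=0 || true
    fi
}

create_namespace() {
    local namespace=$1
    # delete-wait-create makes the step idempotent across retries and reruns
    kubectl delete namespace "${namespace}" --ignore-not-found
    kubectl wait --for=delete namespace "${namespace}"
    kubectl create namespace "${namespace}"
}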
| select(.name=="DISABLE_TELEMETRY").value) = "true"' logger.go:42: 12:03:14 | haproxy/0-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="LOG_LEVEL").value) = "DEBUG"' logger.go:42: 12:03:14 | haproxy/0-deploy-operator | + kubectl -n ps-operator apply -f - logger.go:42: 12:03:14 | haproxy/0-deploy-operator | ++ printf 'select(documentIndex==1).spec.template.spec.containers[0].image="%s"' perconalab/percona-server-mysql-operator:PR-913-7e558330 logger.go:42: 12:03:14 | haproxy/0-deploy-operator | + yq eval 'select(documentIndex==1).spec.template.spec.containers[0].image="perconalab/percona-server-mysql-operator:PR-913-7e558330"' /mnt/jenkins/workspace/cloud-ps-operator_PR-913/deploy/cw-operator.yaml logger.go:42: 12:03:15 | haproxy/0-deploy-operator | configmap/percona-server-mysql-operator-config created logger.go:42: 12:03:15 | haproxy/0-deploy-operator | deployment.apps/percona-server-mysql-operator created logger.go:42: 12:03:15 | haproxy/0-deploy-operator | + deploy_non_tls_cluster_secrets logger.go:42: 12:03:15 | haproxy/0-deploy-operator | + kubectl -n kuttl-test-lucky-shepherd apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-913/e2e-tests/conf/secrets.yaml logger.go:42: 12:03:16 | haproxy/0-deploy-operator | secret/test-secrets created logger.go:42: 12:03:16 | haproxy/0-deploy-operator | + deploy_tls_cluster_secrets logger.go:42: 12:03:16 | haproxy/0-deploy-operator | + kubectl -n kuttl-test-lucky-shepherd apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-913/e2e-tests/conf/ssl-secret.yaml logger.go:42: 12:03:17 | haproxy/0-deploy-operator | secret/test-ssl created logger.go:42: 12:03:17 | haproxy/0-deploy-operator | + deploy_client logger.go:42: 12:03:17 | haproxy/0-deploy-operator | + kubectl -n kuttl-test-lucky-shepherd apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-913/e2e-tests/conf/client.yaml logger.go:42: 12:03:18 | haproxy/0-deploy-operator | pod/mysql-client created logger.go:42: 12:03:19 | haproxy/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 12:03:19 | haproxy/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 12:03:19 | haproxy/0-deploy-operator | ASSERT FAIL Resource(s) not found. logger.go:42: 12:03:20 | haproxy/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 12:03:20 | haproxy/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 12:03:21 | haproxy/0-deploy-operator | ASSERT FAIL Resource(s) not found. logger.go:42: 12:03:22 | haproxy/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 12:03:22 | haproxy/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 12:03:22 | haproxy/0-deploy-operator | ASSERT FAIL Resource(s) not found. 
logger.go:42: 12:03:24 | haproxy/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 12:03:24 | haproxy/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 12:03:24 | haproxy/0-deploy-operator | ASSERT FAIL Resource(s) not found. logger.go:42: 12:03:25 | haproxy/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 12:03:25 | haproxy/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 12:03:26 | haproxy/0-deploy-operator | ASSERT FAIL Resource(s) not found. logger.go:42: 12:03:27 | haproxy/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 12:03:27 | haproxy/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 12:03:27 | haproxy/0-deploy-operator | INFO Found 1 resource(s). logger.go:42: 12:03:27 | haproxy/0-deploy-operator | NAME NAMESPACE COL0 logger.go:42: 12:03:27 | haproxy/0-deploy-operator | percona-server-mysql-operator ps-operator 1 logger.go:42: 12:03:27 | haproxy/0-deploy-operator | ASSERT PASS logger.go:42: 12:03:27 | haproxy/0-deploy-operator | test step completed 0-deploy-operator logger.go:42: 12:03:27 | haproxy/1-create-cluster | starting test step 1-create-cluster logger.go:42: 12:03:27 | haproxy/1-create-cluster | running command: [sh -c set -o errexit set -o xtrace source ../../functions get_cr \ | yq eval '.spec.updateStrategy="RollingUpdate"' - \ | yq eval '.spec.mysql.clusterType="async"' - \ | yq eval '.spec.orchestrator.enabled=true' - \ | yq eval '.spec.proxy.haproxy.enabled=true' - \ | yq eval '.spec.proxy.haproxy.size=3' - \ | kubectl -n "${NAMESPACE}" apply -f -] logger.go:42: 12:03:28 | haproxy/1-create-cluster | + source ../../functions logger.go:42: 12:03:28 | haproxy/1-create-cluster | +++ realpath ../../.. 
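The readiness check above retries kubectl assert (the kubectl-assert krew plugin) every couple of seconds until the deployment reports readyReplicas=1; five failures before a pass is normal while the operator pod pulls its image. For environments without the plugin, a plain-kubectl equivalent of that loop would be (wait_deploy_ready is a hypothetical helper, not from the repo):

wait_deploy_ready() {
    local ns=$1 deploy=$2 ready
    for _ in $(seq 1 60); do
        ready=$(kubectl -n "${ns}" get deployment "${deploy}" \
            -o jsonpath='{.status.readyReplicas}' 2>/dev/null)
        [[ ${ready} == "1" ]] && return 0
        sleep 2
    done
    echo "deployment ${deploy} never became ready" >&2
    return 1
}

wait_deploy_ready "${OPERATOR_NS:-$NAMESPACE}" percona-server-mysql-operator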
logger.go:42: 12:03:27 | haproxy/1-create-cluster | starting test step 1-create-cluster
logger.go:42: 12:03:27 | haproxy/1-create-cluster | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
get_cr \
  | yq eval '.spec.updateStrategy="RollingUpdate"' - \
  | yq eval '.spec.mysql.clusterType="async"' - \
  | yq eval '.spec.orchestrator.enabled=true' - \
  | yq eval '.spec.proxy.haproxy.enabled=true' - \
  | yq eval '.spec.proxy.haproxy.size=3' - \
  | kubectl -n "${NAMESPACE}" apply -f -]
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + source ../../functions
logger.go:42: 12:03:28 | haproxy/1-create-cluster | +++ realpath ../../..
[... vars.sh environment and tool-check trace omitted; identical to the 0-deploy-operator trace above ...]
logger.go:42: 12:03:28 | haproxy/1-create-cluster | ++ oc get projects
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + get_cr
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + local name_suffix=
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval '.spec.updateStrategy="RollingUpdate"' -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval '.spec.mysql.clusterType="async"' -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval .spec.proxy.haproxy.enabled=true -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval .spec.orchestrator.enabled=true -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval '.spec.upgradeOptions.apply="disabled"' -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + '[' -n '' ']'
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval .spec.proxy.haproxy.size=3 -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + kubectl -n kuttl-test-lucky-shepherd apply -f -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval '.spec.mysql.clusterType="async"' -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval .spec.orchestrator.enabled=true -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | ++ printf '.metadata.name="%s"' haproxy
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval '.metadata.name="haproxy"' /mnt/jenkins/workspace/cloud-ps-operator_PR-913/deploy/cr.yaml
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval '.spec.secretsName="test-secrets"' -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | ++ printf '.spec.proxy.haproxy.image="%s"' perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval '.spec.sslSecretName="test-ssl"' -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | ++ printf '.spec.initImage="%s"' perconalab/percona-server-mysql-operator:PR-913-7e558330
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval '.spec.initImage="perconalab/percona-server-mysql-operator:PR-913-7e558330"' -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:3-dev-latest
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval '.spec.pmm.image="perconalab/pmm-client:3-dev-latest"' -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql"' -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup"' -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router"' -
logger.go:42: 12:03:28 | haproxy/1-create-cluster | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 12:03:28 | haproxy/1-create-cluster | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' -
logger.go:42: 12:03:29 | haproxy/1-create-cluster | perconaservermysql.ps.percona.com/haproxy created
logger.go:42: 12:06:42 | haproxy/1-create-cluster | test step completed 1-create-cluster
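The pipeline traced in this step is get_cr plus the per-test yq overrides. get_cr is defined in e2e-tests/functions; judging by the xtrace, its job is to stamp deploy/cr.yaml with the test name, the test secrets, and the CI image tags, and print the result to stdout. A sketch of that shape (simplified; the real function takes more options):

get_cr() {
    local name_suffix=$1

    yq eval "$(printf '.metadata.name="%s"' "${test_name}${name_suffix:+-$name_suffix}")" "${DEPLOY_DIR}/cr.yaml" \
        | yq eval '.spec.secretsName="test-secrets"' - \
        | yq eval '.spec.sslSecretName="test-ssl"' - \
        | yq eval '.spec.upgradeOptions.apply="disabled"' - \
        | yq eval "$(printf '.spec.initImage="%s"' "${IMAGE}")" - \
        | yq eval "$(printf '.spec.mysql.image="%s"' "${IMAGE_MYSQL}")" - \
        | yq eval "$(printf '.spec.proxy.haproxy.image="%s"' "${IMAGE_HAPROXY}")" - \
        | yq eval "$(printf '.spec.proxy.router.image="%s"' "${IMAGE_ROUTER}")" - \
        | yq eval "$(printf '.spec.orchestrator.image="%s"' "${IMAGE_ORCHESTRATOR}")" - \
        | yq eval "$(printf '.spec.backup.image="%s"' "${IMAGE_BACKUP}")" - \
        | yq eval "$(printf '.spec.toolkit.image="%s"' "${IMAGE_TOOLKIT}")" - \
        | yq eval "$(printf '.spec.pmm.image="%s"' "${IMAGE_PMM_CLIENT}")" -
}

The step then layers its own overrides on top (async cluster type, orchestrator enabled, three HAProxy pods) before kubectl apply, which is why the CR that lands in the cluster differs from the stock deploy/cr.yaml.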
logger.go:42: 12:06:42 | haproxy/2-write-data | starting test step 2-write-data
logger.go:42: 12:06:42 | haproxy/2-write-data | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
run_mysql \
  "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" \
  "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"
run_mysql \
  "INSERT myDB.myTable (id) VALUES (100500)" \
  "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"]
logger.go:42: 12:06:42 | haproxy/2-write-data | + source ../../functions
logger.go:42: 12:06:42 | haproxy/2-write-data | +++ realpath ../../..
[... vars.sh environment and tool-check trace omitted; identical to the 0-deploy-operator trace above ...]
logger.go:42: 12:06:42 | haproxy/2-write-data | ++ oc get projects
logger.go:42: 12:06:42 | haproxy/2-write-data | +++ get_cluster_name
logger.go:42: 12:06:42 | haproxy/2-write-data | +++ kubectl -n kuttl-test-lucky-shepherd get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 12:06:43 | haproxy/2-write-data | ++ get_haproxy_svc haproxy
logger.go:42: 12:06:43 | haproxy/2-write-data | ++ local cluster=haproxy
logger.go:42: 12:06:43 | haproxy/2-write-data | ++ echo haproxy-haproxy
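Both helpers traced here are one-liners: the cluster name is read from the first PerconaServerMySQL object in the namespace, and the HAProxy Service name is derived from it by convention. Reconstructed shape:

get_cluster_name() {
    kubectl -n "${NAMESPACE}" get ps -o jsonpath='{.items[0].metadata.name}'
}

get_haproxy_svc() {
    local cluster=$1
    echo "${cluster}-haproxy"   # Service the operator creates for the cluster's HAProxy pods
}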
logger.go:42: 12:06:43 | haproxy/2-write-data | + run_mysql 'CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)' '-h haproxy-haproxy -uroot -proot_password'
logger.go:42: 12:06:43 | haproxy/2-write-data | + local 'command=CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)'
logger.go:42: 12:06:43 | haproxy/2-write-data | + local 'uri=-h haproxy-haproxy -uroot -proot_password'
logger.go:42: 12:06:43 | haproxy/2-write-data | + local pod=
logger.go:42: 12:06:43 | haproxy/2-write-data | ++ get_client_pod
logger.go:42: 12:06:43 | haproxy/2-write-data | ++ kubectl -n kuttl-test-lucky-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 12:06:43 | haproxy/2-write-data | + client_pod=mysql-client
logger.go:42: 12:06:43 | haproxy/2-write-data | + wait_pod mysql-client
logger.go:42: 12:06:43 | haproxy/2-write-data | + local pod=mysql-client
logger.go:42: 12:06:43 | haproxy/2-write-data | + set +o xtrace
logger.go:42: 12:06:43 | haproxy/2-write-data | mysql-clienttrue
logger.go:42: 12:06:43 | haproxy/2-write-data | + kubectl -n kuttl-test-lucky-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" | mysql -sN -h haproxy-haproxy -uroot -proot_password'
logger.go:42: 12:06:43 | haproxy/2-write-data | + sed -e 's/mysql: //'
logger.go:42: 12:06:43 | haproxy/2-write-data | + grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 12:06:44 | haproxy/2-write-data | + :
logger.go:42: 12:06:44 | haproxy/2-write-data | +++ get_cluster_name
logger.go:42: 12:06:44 | haproxy/2-write-data | +++ kubectl -n kuttl-test-lucky-shepherd get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 12:06:45 | haproxy/2-write-data | ++ get_haproxy_svc haproxy
logger.go:42: 12:06:45 | haproxy/2-write-data | ++ local cluster=haproxy
logger.go:42: 12:06:45 | haproxy/2-write-data | ++ echo haproxy-haproxy
logger.go:42: 12:06:45 | haproxy/2-write-data | + run_mysql 'INSERT myDB.myTable (id) VALUES (100500)' '-h haproxy-haproxy -uroot -proot_password'
logger.go:42: 12:06:45 | haproxy/2-write-data | + local 'command=INSERT myDB.myTable (id) VALUES (100500)'
logger.go:42: 12:06:45 | haproxy/2-write-data | + local 'uri=-h haproxy-haproxy -uroot -proot_password'
logger.go:42: 12:06:45 | haproxy/2-write-data | + local pod=
logger.go:42: 12:06:45 | haproxy/2-write-data | ++ get_client_pod
logger.go:42: 12:06:45 | haproxy/2-write-data | ++ kubectl -n kuttl-test-lucky-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 12:06:45 | haproxy/2-write-data | + client_pod=mysql-client
logger.go:42: 12:06:45 | haproxy/2-write-data | + wait_pod mysql-client
logger.go:42: 12:06:45 | haproxy/2-write-data | + local pod=mysql-client
logger.go:42: 12:06:45 | haproxy/2-write-data | + set +o xtrace
logger.go:42: 12:06:45 | haproxy/2-write-data | mysql-clienttrue
logger.go:42: 12:06:45 | haproxy/2-write-data | + kubectl -n kuttl-test-lucky-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "INSERT myDB.myTable (id) VALUES (100500)" | mysql -sN -h haproxy-haproxy -uroot -proot_password'
logger.go:42: 12:06:45 | haproxy/2-write-data | + sed -e 's/mysql: //'
logger.go:42: 12:06:45 | haproxy/2-write-data | + grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 12:06:46 | haproxy/2-write-data | + :
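run_mysql, as reconstructed from the trace above, execs into the standing mysql-client pod and pipes the statement to the mysql CLI, filtering the password warning; the bare ':' after each call suggests empty output is tolerated. A sketch, not the verbatim function:

run_mysql() {
    local command=$1
    local uri=$2
    local client_pod
    client_pod=$(kubectl -n "${NAMESPACE}" get pods --selector=name=mysql-client \
        -o jsonpath='{.items[].metadata.name}')

    kubectl -n "${NAMESPACE}" exec "${client_pod}" -- \
        bash -c "printf '%s\n' \"${command}\" | mysql -sN ${uri}" \
        | sed -e 's/mysql: //' \
        | grep -v 'Using a password on the command line interface can be insecure.' \
        || :   # DDL/DML return no rows; do not let grep's exit status fail the step
}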
[controller-runtime] log.SetLogger(...) was never called; logs will not be displayed.
Detected at:
	> goroutine 36 [running]:
	> runtime/debug.Stack()
	> 	/nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/runtime/debug/stack.go:24 +0x5e
	> sigs.k8s.io/controller-runtime/pkg/log.eventuallyFulfillRoot()
	> 	/home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/log/log.go:60 +0xcd
	> sigs.k8s.io/controller-runtime/pkg/log.(*delegatingLogSink).WithName(0xc0002a9c00, {0x184a055, 0x14})
	> 	/home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/log/deleg.go:147 +0x3e
	> github.com/go-logr/logr.Logger.WithName({{0x1acb7d8, 0xc0002a9c00}, 0x0}, {0x184a055?, 0xc00068bf80?})
	> 	/home/mowsiany/go/pkg/mod/github.com/go-logr/logr@v1.2.4/logr.go:336 +0x36
	> sigs.k8s.io/controller-runtime/pkg/client.newClient(0x131ead3?, {0x0, 0xc00043ea10, {0x1accd90, 0xc00031e540}, 0x0, {0x0, 0x0}, 0x0})
	> 	/home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/client/client.go:122 +0xf1
	> sigs.k8s.io/controller-runtime/pkg/client.New(0xc0000ea6c8?, {0x0, 0xc00043ea10, {0x1accd90, 0xc00031e540}, 0x0, {0x0, 0x0}, 0x0})
	> 	/home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/client/client.go:103 +0x7d
	> github.com/kudobuilder/kuttl/pkg/test/utils.NewRetryClient(0xc0000ea6c8, {0x0, 0xc00043ea10, {0x1accd90, 0xc00031e540}, 0x0, {0x0, 0x0}, 0x0})
	> 	/home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/utils/kubernetes.go:177 +0x127
	> github.com/kudobuilder/kuttl/pkg/test.(*Harness).Client(0xc00042cc08, 0x76?)
	> 	/home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/harness.go:323 +0x18e
	> github.com/kudobuilder/kuttl/pkg/test.(*Step).Create(0xc0005308f0, 0xc00043c340, {0xc00004bda0, 0x19})
	> 	/home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/step.go:177 +0x63
	> github.com/kudobuilder/kuttl/pkg/test.(*Step).Run(0xc0005308f0, 0xc00043c340, {0xc00004bda0, 0x19})
	> 	/home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/step.go:457 +0x24a
	> github.com/kudobuilder/kuttl/pkg/test.(*Case).Run(0xc00040efa0, 0xc00043c340, 0xc0002f8360)
	> 	/home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/case.go:373 +0xaeb
	> github.com/kudobuilder/kuttl/pkg/test.(*Harness).RunTests.func1.1(0xc00043c340)
	> 	/home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/harness.go:401 +0x12e
	> testing.tRunner(0xc00043c340, 0xc0005127e0)
	> 	/nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/testing/testing.go:1689 +0xfb
	> created by testing.(*T).Run in goroutine 35
	> 	/nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/testing/testing.go:1742 +0x390
logger.go:42: 12:06:46 | haproxy/2-write-data | test step completed 2-write-data
logger.go:42: 12:06:46 | haproxy/3-read-from-primary | starting test step 3-read-from-primary
logger.go:42: 12:06:46 | haproxy/3-read-from-primary | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
data=$(run_mysql "SELECT * FROM myDB.myTable" "-h $(get_haproxy_svc $(get_cluster_name)) -P3306 -uroot -proot_password")
kubectl create configmap -n "${NAMESPACE}" 03-read-from-primary --from-literal=data="${data}"]
logger.go:42: 12:06:46 | haproxy/3-read-from-primary | + source ../../functions
logger.go:42: 12:06:46 | haproxy/3-read-from-primary | +++ realpath ../../..
[... vars.sh environment and tool-check trace omitted; identical to the 0-deploy-operator trace above ...]
logger.go:42: 12:06:47 | haproxy/3-read-from-primary | ++ oc get projects
logger.go:42: 12:06:47 | haproxy/3-read-from-primary | ++++ get_cluster_name
logger.go:42: 12:06:47 | haproxy/3-read-from-primary | ++++ kubectl -n kuttl-test-lucky-shepherd get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 12:06:48 | haproxy/3-read-from-primary | +++ get_haproxy_svc haproxy
logger.go:42: 12:06:48 | haproxy/3-read-from-primary | +++ local cluster=haproxy
logger.go:42: 12:06:48 | haproxy/3-read-from-primary | +++ echo haproxy-haproxy
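Steps 3 and 4 run the same SELECT against the same Service but different ports: the HAProxy Service exposes the writer on 3306 and the reader side on 3307, so the pair verifies that the row written in step 2 reached both the primary and the replicas. Illustration only:

svc=$(get_haproxy_svc "$(get_cluster_name)")

# port 3306: routed to the current primary
run_mysql "SELECT * FROM myDB.myTable" "-h ${svc} -P3306 -uroot -proot_password"

# port 3307: routed to the replicas
run_mysql "SELECT * FROM myDB.myTable" "-h ${svc} -P3307 -uroot -proot_password"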
logger.go:42: 12:06:48 | haproxy/3-read-from-primary | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h haproxy-haproxy -P3306 -uroot -proot_password'
logger.go:42: 12:06:48 | haproxy/3-read-from-primary | ++ local 'command=SELECT * FROM myDB.myTable'
logger.go:42: 12:06:48 | haproxy/3-read-from-primary | ++ local 'uri=-h haproxy-haproxy -P3306 -uroot -proot_password'
logger.go:42: 12:06:48 | haproxy/3-read-from-primary | ++ local pod=
logger.go:42: 12:06:48 | haproxy/3-read-from-primary | +++ get_client_pod
logger.go:42: 12:06:48 | haproxy/3-read-from-primary | +++ kubectl -n kuttl-test-lucky-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 12:06:48 | haproxy/3-read-from-primary | ++ client_pod=mysql-client
logger.go:42: 12:06:48 | haproxy/3-read-from-primary | ++ wait_pod mysql-client
logger.go:42: 12:06:48 | haproxy/3-read-from-primary | ++ local pod=mysql-client
logger.go:42: 12:06:48 | haproxy/3-read-from-primary | ++ set +o xtrace
logger.go:42: 12:06:49 | haproxy/3-read-from-primary | mysql-clienttrue
logger.go:42: 12:06:49 | haproxy/3-read-from-primary | ++ sed -e 's/mysql: //'
logger.go:42: 12:06:49 | haproxy/3-read-from-primary | ++ grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 12:06:49 | haproxy/3-read-from-primary | ++ kubectl -n kuttl-test-lucky-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h haproxy-haproxy -P3306 -uroot -proot_password'
logger.go:42: 12:06:49 | haproxy/3-read-from-primary | + data=100500
logger.go:42: 12:06:49 | haproxy/3-read-from-primary | + kubectl create configmap -n kuttl-test-lucky-shepherd 03-read-from-primary --from-literal=data=100500
logger.go:42: 12:06:50 | haproxy/3-read-from-primary | configmap/03-read-from-primary created
logger.go:42: 12:06:50 | haproxy/3-read-from-primary | test step completed 3-read-from-primary
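The capture pattern above is how these steps turn a SQL result into something kuttl can assert on declaratively: the value goes into a ConfigMap, and the step's assert manifest matches the expected data. In outline (the assert file name is an assumption based on kuttl naming conventions):

data=$(run_mysql "SELECT * FROM myDB.myTable" \
    "-h $(get_haproxy_svc $(get_cluster_name)) -P3306 -uroot -proot_password")
kubectl create configmap -n "${NAMESPACE}" 03-read-from-primary --from-literal=data="${data}"

# a hypothetical 03-assert.yaml paired with this step would pin the value:
#   apiVersion: v1
#   kind: ConfigMap
#   metadata:
#     name: 03-read-from-primary
#   data:
#     data: "100500"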
logger.go:42: 12:06:50 | haproxy/4-read-from-replicas | starting test step 4-read-from-replicas
logger.go:42: 12:06:50 | haproxy/4-read-from-replicas | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
data=$(run_mysql "SELECT * FROM myDB.myTable" "-h $(get_haproxy_svc $(get_cluster_name)) -P3307 -uroot -proot_password")
kubectl create configmap -n "${NAMESPACE}" 04-read-from-replicas --from-literal=${test_name}-haproxy-replicas=${data}]
logger.go:42: 12:06:50 | haproxy/4-read-from-replicas | + source ../../functions
logger.go:42: 12:06:50 | haproxy/4-read-from-replicas | +++ realpath ../../..
[... vars.sh environment and tool-check trace omitted; identical to the 0-deploy-operator trace above ...]
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | ++ oc get projects
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | ++++ get_cluster_name
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | ++++ kubectl -n kuttl-test-lucky-shepherd get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | +++ get_haproxy_svc haproxy
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | +++ local cluster=haproxy
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | +++ echo haproxy-haproxy
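Reads through 3307 are balanced by HAProxy across the replica backends; a quick way to see that from the client pod is to ask each connection for its hostname. Illustration only, not part of the test:

svc=$(get_haproxy_svc "$(get_cluster_name)")
for _ in $(seq 1 6); do
    # with multiple pods behind the replicas port, hostnames should vary between runs
    run_mysql "SELECT @@hostname" "-h ${svc} -P3307 -uroot -proot_password"
done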
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h haproxy-haproxy -P3307 -uroot -proot_password'
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | ++ local 'command=SELECT * FROM myDB.myTable'
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | ++ local 'uri=-h haproxy-haproxy -P3307 -uroot -proot_password'
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | ++ local pod=
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | +++ get_client_pod
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | +++ kubectl -n kuttl-test-lucky-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | ++ client_pod=mysql-client
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | ++ wait_pod mysql-client
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | ++ local pod=mysql-client
logger.go:42: 12:06:51 | haproxy/4-read-from-replicas | ++ set +o xtrace
logger.go:42: 12:06:52 | haproxy/4-read-from-replicas | mysql-clienttrue
logger.go:42: 12:06:52 | haproxy/4-read-from-replicas | ++ kubectl -n kuttl-test-lucky-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h haproxy-haproxy -P3307 -uroot -proot_password'
logger.go:42: 12:06:52 | haproxy/4-read-from-replicas | ++ sed -e 's/mysql: //'
logger.go:42: 12:06:52 | haproxy/4-read-from-replicas | ++ grep -v 'Using a password on the command line interface can be insecure.'
logger.go:42: 12:06:53 | haproxy/4-read-from-replicas | + data=100500
logger.go:42: 12:06:53 | haproxy/4-read-from-replicas | + kubectl create configmap -n kuttl-test-lucky-shepherd 04-read-from-replicas --from-literal=haproxy-haproxy-replicas=100500
logger.go:42: 12:06:53 | haproxy/4-read-from-replicas | configmap/04-read-from-replicas created
logger.go:42: 12:06:53 | haproxy/4-read-from-replicas | test step completed 4-read-from-replicas
logger.go:42: 12:06:53 | haproxy/5-check-pods-have-same-primary | starting test step 5-check-pods-have-same-primary
logger.go:42: 12:06:53 | haproxy/5-check-pods-have-same-primary | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
data=()
for i in $(seq 0 2); do
  data+=("$(get_primary_from_haproxy ${test_name}-haproxy-$i)")
done
if [ "${data[0]}" != "${data[1]}" -o "${data[1]}" != "${data[2]}" ]; then
  echo "Not all haproxy pods point to same primary: 0: ${data[0]} 1: ${data[1]} 2: ${data[2]}"
  exit 1
fi]
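get_primary_from_haproxy is defined in e2e-tests/functions and only its call sites appear in this log; a plausible shape, consistent with how the rest of the suite queries through HAProxy, is to connect to each HAProxy pod's writer port and report which mysql host it lands on. A sketch under that assumption:

get_primary_from_haproxy() {
    local haproxy_pod=$1
    local haproxy_pod_ip
    haproxy_pod_ip=$(kubectl -n "${NAMESPACE}" get pod "${haproxy_pod}" \
        -o jsonpath='{.status.podIP}')

    # ask the writer port which backend this particular HAProxy proxies to right now
    run_mysql "SELECT @@hostname" "-h ${haproxy_pod_ip} -P3306 -uroot -proot_password"
}

The step collects this for pods 0 through 2 and fails unless all three HAProxy pods agree on the primary.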
logger.go:42: 12:06:54 | haproxy/5-check-pods-have-same-primary | + data=() logger.go:42: 12:06:54 | haproxy/5-check-pods-have-same-primary | ++ seq 0 2 logger.go:42: 12:06:54 | haproxy/5-check-pods-have-same-primary | + for i in '$(seq 0 2)' logger.go:42: 12:06:54 | haproxy/5-check-pods-have-same-primary | + data+=("$(get_primary_from_haproxy ${test_name}-haproxy-$i)") logger.go:42: 12:06:54 | haproxy/5-check-pods-have-same-primary | ++ get_primary_from_haproxy haproxy-haproxy-0 logger.go:42: 12:06:54 | haproxy/5-check-pods-have-same-primary | ++ local haproxy_pod=haproxy-haproxy-0 
logger.go:42: 12:06:54 | haproxy/5-check-pods-have-same-primary | +++ kubectl -n kuttl-test-lucky-shepherd get pods haproxy-haproxy-0 -o 'jsonpath={.status.podIP}' logger.go:42: 12:06:54 | haproxy/5-check-pods-have-same-primary | ++ local haproxy_pod_ip=10.41.41.27 logger.go:42: 12:06:54 | haproxy/5-check-pods-have-same-primary | ++ awk '{print $2}' logger.go:42: 12:06:54 | haproxy/5-check-pods-have-same-primary | ++ run_mysql 'SHOW VARIABLES LIKE '\''%hostname%'\'';' '-h 10.41.41.27 -P3306 -uroot -proot_password' logger.go:42: 12:06:54 | haproxy/5-check-pods-have-same-primary | ++ local 'command=SHOW VARIABLES LIKE '\''%hostname%'\'';' logger.go:42: 12:06:54 | haproxy/5-check-pods-have-same-primary | ++ local 'uri=-h 10.41.41.27 -P3306 -uroot -proot_password' logger.go:42: 12:06:54 | haproxy/5-check-pods-have-same-primary | ++ local pod= logger.go:42: 12:06:54 | haproxy/5-check-pods-have-same-primary | +++ get_client_pod logger.go:42: 12:06:54 | haproxy/5-check-pods-have-same-primary | +++ kubectl -n kuttl-test-lucky-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:06:55 | haproxy/5-check-pods-have-same-primary | ++ client_pod=mysql-client logger.go:42: 12:06:55 | haproxy/5-check-pods-have-same-primary | ++ wait_pod mysql-client logger.go:42: 12:06:55 | haproxy/5-check-pods-have-same-primary | ++ local pod=mysql-client logger.go:42: 12:06:55 | haproxy/5-check-pods-have-same-primary | ++ set +o xtrace logger.go:42: 12:06:55 | haproxy/5-check-pods-have-same-primary | mysql-clienttrue logger.go:42: 12:06:55 | haproxy/5-check-pods-have-same-primary | ++ kubectl -n kuttl-test-lucky-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SHOW VARIABLES LIKE '\''%hostname%'\'';" | mysql -sN -h 10.41.41.27 -P3306 -uroot -proot_password' logger.go:42: 12:06:55 | haproxy/5-check-pods-have-same-primary | ++ sed -e 's/mysql: //' logger.go:42: 12:06:55 | haproxy/5-check-pods-have-same-primary | ++ grep -v 'Using a password on the command line interface can be insecure.' 
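
Each get_primary_from_haproxy call in this step follows the pattern just traced: look up the HAProxy pod's IP, send SHOW VARIABLES LIKE '%hostname%' through port 3306 (the primary frontend), and keep the second column, which is the hostname of the mysql pod that HAProxy is currently routing writes to. A sketch reconstructed from the trace, reusing the run_mysql helper above:

    get_primary_from_haproxy() {
        local haproxy_pod="$1"
        local haproxy_pod_ip
        haproxy_pod_ip=$(kubectl -n "$NAMESPACE" get pods "$haproxy_pod" \
            -o 'jsonpath={.status.podIP}')
        # The query returns "hostname <pod-name>"; awk keeps the pod name.
        run_mysql "SHOW VARIABLES LIKE '%hostname%';" \
            "-h $haproxy_pod_ip -P3306 -uroot -proot_password" \
            | awk '{print $2}'
    }
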
logger.go:42: 12:06:56 | haproxy/5-check-pods-have-same-primary | + for i in '$(seq 0 2)' logger.go:42: 12:06:56 | haproxy/5-check-pods-have-same-primary | + data+=("$(get_primary_from_haproxy ${test_name}-haproxy-$i)") logger.go:42: 12:06:56 | haproxy/5-check-pods-have-same-primary | ++ get_primary_from_haproxy haproxy-haproxy-1 logger.go:42: 12:06:56 | haproxy/5-check-pods-have-same-primary | ++ local haproxy_pod=haproxy-haproxy-1 logger.go:42: 12:06:56 | haproxy/5-check-pods-have-same-primary | +++ kubectl -n kuttl-test-lucky-shepherd get pods haproxy-haproxy-1 -o 'jsonpath={.status.podIP}' logger.go:42: 12:06:56 | haproxy/5-check-pods-have-same-primary | ++ local haproxy_pod_ip=10.41.42.28 logger.go:42: 12:06:56 | haproxy/5-check-pods-have-same-primary | ++ run_mysql 'SHOW VARIABLES LIKE '\''%hostname%'\'';' '-h 10.41.42.28 -P3306 -uroot -proot_password' logger.go:42: 12:06:56 | haproxy/5-check-pods-have-same-primary | ++ local 'command=SHOW VARIABLES LIKE '\''%hostname%'\'';' logger.go:42: 12:06:56 | haproxy/5-check-pods-have-same-primary | ++ awk '{print $2}' logger.go:42: 12:06:56 | haproxy/5-check-pods-have-same-primary | ++ local 'uri=-h 10.41.42.28 -P3306 -uroot -proot_password' logger.go:42: 12:06:56 | haproxy/5-check-pods-have-same-primary | ++ local pod= logger.go:42: 12:06:56 | haproxy/5-check-pods-have-same-primary | +++ get_client_pod logger.go:42: 12:06:56 | haproxy/5-check-pods-have-same-primary | +++ kubectl -n kuttl-test-lucky-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:06:57 | haproxy/5-check-pods-have-same-primary | ++ client_pod=mysql-client logger.go:42: 12:06:57 | haproxy/5-check-pods-have-same-primary | ++ wait_pod mysql-client logger.go:42: 12:06:57 | haproxy/5-check-pods-have-same-primary | ++ local pod=mysql-client logger.go:42: 12:06:57 | haproxy/5-check-pods-have-same-primary | ++ set +o xtrace logger.go:42: 12:06:57 | haproxy/5-check-pods-have-same-primary | mysql-clienttrue logger.go:42: 12:06:57 | haproxy/5-check-pods-have-same-primary | ++ kubectl -n kuttl-test-lucky-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SHOW VARIABLES LIKE '\''%hostname%'\'';" | mysql -sN -h 10.41.42.28 -P3306 -uroot -proot_password' logger.go:42: 12:06:57 | haproxy/5-check-pods-have-same-primary | ++ sed -e 's/mysql: //' logger.go:42: 12:06:57 | haproxy/5-check-pods-have-same-primary | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 12:06:58 | haproxy/5-check-pods-have-same-primary | + for i in '$(seq 0 2)' logger.go:42: 12:06:58 | haproxy/5-check-pods-have-same-primary | + data+=("$(get_primary_from_haproxy ${test_name}-haproxy-$i)") logger.go:42: 12:06:58 | haproxy/5-check-pods-have-same-primary | ++ get_primary_from_haproxy haproxy-haproxy-2 logger.go:42: 12:06:58 | haproxy/5-check-pods-have-same-primary | ++ local haproxy_pod=haproxy-haproxy-2 logger.go:42: 12:06:58 | haproxy/5-check-pods-have-same-primary | +++ kubectl -n kuttl-test-lucky-shepherd get pods haproxy-haproxy-2 -o 'jsonpath={.status.podIP}' logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | ++ local haproxy_pod_ip=10.41.40.32 logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | ++ run_mysql 'SHOW VARIABLES LIKE '\''%hostname%'\'';' '-h 10.41.40.32 -P3306 -uroot -proot_password' logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | ++ awk '{print $2}' logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | ++ local 'command=SHOW VARIABLES LIKE '\''%hostname%'\'';' logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | ++ local 'uri=-h 10.41.40.32 -P3306 -uroot -proot_password' logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | ++ local pod= logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | +++ get_client_pod logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | +++ kubectl -n kuttl-test-lucky-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | ++ client_pod=mysql-client logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | ++ wait_pod mysql-client logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | ++ local pod=mysql-client logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | ++ set +o xtrace logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | mysql-clienttrue logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | ++ kubectl -n kuttl-test-lucky-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SHOW VARIABLES LIKE '\''%hostname%'\'';" | mysql -sN -h 10.41.40.32 -P3306 -uroot -proot_password' logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | ++ sed -e 's/mysql: //' logger.go:42: 12:06:59 | haproxy/5-check-pods-have-same-primary | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:07:00 | haproxy/5-check-pods-have-same-primary | + '[' haproxy-mysql-0 '!=' haproxy-mysql-0 -o haproxy-mysql-0 '!=' haproxy-mysql-0 ']' logger.go:42: 12:07:00 | haproxy/5-check-pods-have-same-primary | test step completed 5-check-pods-have-same-primary logger.go:42: 12:07:00 | haproxy/6-check-label-haproxy-primary | starting test step 6-check-label-haproxy-primary logger.go:42: 12:07:00 | haproxy/6-check-label-haproxy-primary | running command: [sh -c set -o errexit set -o xtrace source ../../functions primary_pod_from_label="$(get_primary_from_label)" primary_pod_from_haproxy="$(get_primary_from_haproxy ${test_name}-haproxy-0)" if [ "${primary_pod_from_label}" != "${primary_pod_from_haproxy}" ]; then echo "Primary in k8s label (${primary_pod_from_label}) is not set to same pod as in haproxy (${primary_pod_from_haproxy})!" exit 1 fi] logger.go:42: 12:07:00 | haproxy/6-check-label-haproxy-primary | + source ../../functions logger.go:42: 12:07:00 | haproxy/6-check-label-haproxy-primary | +++ realpath ../../.. 
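
All three probes returned haproxy-mysql-0, so step 5's [ ... -o ... ] check passes. POSIX marks -o inside [ ] as obsolescent; an equivalent form of the same assertion (a sketch, not the suite's actual code):

    # Fail unless all three HAProxy pods agree on the current primary.
    if [[ "${data[0]}" != "${data[1]}" || "${data[1]}" != "${data[2]}" ]]; then
        echo "Not all haproxy pods point to same primary: 0: ${data[0]} 1: ${data[1]} 2: ${data[2]}"
        exit 1
    fi
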
logger.go:42: 12:07:01 | haproxy/6-check-label-haproxy-primary | ++ get_primary_from_label logger.go:42: 12:07:01 | haproxy/6-check-label-haproxy-primary | ++ kubectl -n kuttl-test-lucky-shepherd get pods -l mysql.percona.com/primary=true '-ojsonpath={.items[0].metadata.name}' logger.go:42: 12:07:01 | haproxy/6-check-label-haproxy-primary | + primary_pod_from_label=haproxy-mysql-0 logger.go:42: 12:07:01 | haproxy/6-check-label-haproxy-primary | ++ get_primary_from_haproxy haproxy-haproxy-0 logger.go:42: 12:07:01 | haproxy/6-check-label-haproxy-primary | ++ local haproxy_pod=haproxy-haproxy-0 logger.go:42: 12:07:01 | 
haproxy/6-check-label-haproxy-primary | +++ kubectl -n kuttl-test-lucky-shepherd get pods haproxy-haproxy-0 -o 'jsonpath={.status.podIP}' logger.go:42: 12:07:02 | haproxy/6-check-label-haproxy-primary | ++ local haproxy_pod_ip=10.41.41.27 logger.go:42: 12:07:02 | haproxy/6-check-label-haproxy-primary | ++ run_mysql 'SHOW VARIABLES LIKE '\''%hostname%'\'';' '-h 10.41.41.27 -P3306 -uroot -proot_password' logger.go:42: 12:07:02 | haproxy/6-check-label-haproxy-primary | ++ local 'command=SHOW VARIABLES LIKE '\''%hostname%'\'';' logger.go:42: 12:07:02 | haproxy/6-check-label-haproxy-primary | ++ awk '{print $2}' logger.go:42: 12:07:02 | haproxy/6-check-label-haproxy-primary | ++ local 'uri=-h 10.41.41.27 -P3306 -uroot -proot_password' logger.go:42: 12:07:02 | haproxy/6-check-label-haproxy-primary | ++ local pod= logger.go:42: 12:07:02 | haproxy/6-check-label-haproxy-primary | +++ get_client_pod logger.go:42: 12:07:02 | haproxy/6-check-label-haproxy-primary | +++ kubectl -n kuttl-test-lucky-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:07:02 | haproxy/6-check-label-haproxy-primary | ++ client_pod=mysql-client logger.go:42: 12:07:02 | haproxy/6-check-label-haproxy-primary | ++ wait_pod mysql-client logger.go:42: 12:07:02 | haproxy/6-check-label-haproxy-primary | ++ local pod=mysql-client logger.go:42: 12:07:02 | haproxy/6-check-label-haproxy-primary | ++ set +o xtrace logger.go:42: 12:07:03 | haproxy/6-check-label-haproxy-primary | mysql-clienttrue logger.go:42: 12:07:03 | haproxy/6-check-label-haproxy-primary | ++ kubectl -n kuttl-test-lucky-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SHOW VARIABLES LIKE '\''%hostname%'\'';" | mysql -sN -h 10.41.41.27 -P3306 -uroot -proot_password' logger.go:42: 12:07:03 | haproxy/6-check-label-haproxy-primary | ++ sed -e 's/mysql: //' logger.go:42: 12:07:03 | haproxy/6-check-label-haproxy-primary | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:07:03 | haproxy/6-check-label-haproxy-primary | + primary_pod_from_haproxy=haproxy-mysql-0 logger.go:42: 12:07:03 | haproxy/6-check-label-haproxy-primary | + '[' haproxy-mysql-0 '!=' haproxy-mysql-0 ']' logger.go:42: 12:07:03 | haproxy/6-check-label-haproxy-primary | test step completed 6-check-label-haproxy-primary logger.go:42: 12:07:03 | haproxy/7-check-primary-failover | starting test step 7-check-primary-failover logger.go:42: 12:07:03 | haproxy/7-check-primary-failover | running command: [sh -c set -o errexit set -o xtrace source ../../functions primary_pod_from_label="$(get_primary_from_label)" kubectl -n "${NAMESPACE}" delete pod ${primary_pod_from_label} sleep 3 data=() for i in $(seq 0 2); do data+=("$(get_primary_from_haproxy ${test_name}-haproxy-$i)") done if [ "${data[0]}" != "${data[1]}" -o "${data[1]}" != "${data[2]}" ]; then echo "Not all haproxy pods point to same primary: 0: ${data[0]} 1: ${data[1]} 2: ${data[2]}" exit 1 fi primary_pod_from_label="$(get_primary_from_label)" primary_pod_from_haproxy="$(get_primary_from_haproxy ${test_name}-haproxy-0)" if [ "${primary_pod_from_label}" != "${primary_pod_from_haproxy}" ]; then echo "Primary in k8s label (${primary_pod_from_label}) is not set to same pod as in haproxy (${primary_pod_from_haproxy})!" exit 1 fi] logger.go:42: 12:07:03 | haproxy/7-check-primary-failover | + source ../../functions logger.go:42: 12:07:03 | haproxy/7-check-primary-failover | +++ realpath ../../.. 
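
Step 6, completed above, cross-checks two sources of truth: the mysql.percona.com/primary=true label the operator maintains on the current primary pod, and HAProxy's own routing decision. The label side is a single lookup; a sketch consistent with the trace:

    get_primary_from_label() {
        # The operator labels the current primary; take the first match.
        kubectl -n "$NAMESPACE" get pods -l mysql.percona.com/primary=true \
            -o 'jsonpath={.items[0].metadata.name}'
    }

Step 7, starting here, deletes that labelled pod and then repeats both checks, so a successful failover must move the label and every HAProxy backend to the same new pod (haproxy-mysql-1 in this run).
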
logger.go:42: 12:07:04 | haproxy/7-check-primary-failover | ++ get_primary_from_label logger.go:42: 12:07:04 | haproxy/7-check-primary-failover | ++ kubectl -n kuttl-test-lucky-shepherd get pods -l mysql.percona.com/primary=true '-ojsonpath={.items[0].metadata.name}' logger.go:42: 12:07:04 | haproxy/7-check-primary-failover | + primary_pod_from_label=haproxy-mysql-0 logger.go:42: 12:07:04 | haproxy/7-check-primary-failover | + kubectl -n kuttl-test-lucky-shepherd delete pod haproxy-mysql-0 logger.go:42: 12:07:05 | haproxy/7-check-primary-failover | pod "haproxy-mysql-0" deleted logger.go:42: 12:07:25 | haproxy/7-check-primary-failover | + sleep 3 logger.go:42: 12:07:28 | haproxy/7-check-primary-failover | + data=() logger.go:42: 12:07:28 | haproxy/7-check-primary-failover | ++ seq 0 2 logger.go:42: 12:07:28 | haproxy/7-check-primary-failover | + for i in '$(seq 0 2)' logger.go:42: 12:07:28 | 
haproxy/7-check-primary-failover | + data+=("$(get_primary_from_haproxy ${test_name}-haproxy-$i)") logger.go:42: 12:07:28 | haproxy/7-check-primary-failover | ++ get_primary_from_haproxy haproxy-haproxy-0 logger.go:42: 12:07:28 | haproxy/7-check-primary-failover | ++ local haproxy_pod=haproxy-haproxy-0 logger.go:42: 12:07:28 | haproxy/7-check-primary-failover | +++ kubectl -n kuttl-test-lucky-shepherd get pods haproxy-haproxy-0 -o 'jsonpath={.status.podIP}' logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | ++ local haproxy_pod_ip=10.41.41.27 logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | ++ run_mysql 'SHOW VARIABLES LIKE '\''%hostname%'\'';' '-h 10.41.41.27 -P3306 -uroot -proot_password' logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | ++ awk '{print $2}' logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | ++ local 'command=SHOW VARIABLES LIKE '\''%hostname%'\'';' logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | ++ local 'uri=-h 10.41.41.27 -P3306 -uroot -proot_password' logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | ++ local pod= logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | +++ get_client_pod logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | +++ kubectl -n kuttl-test-lucky-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | ++ client_pod=mysql-client logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | ++ wait_pod mysql-client logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | ++ local pod=mysql-client logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | ++ set +o xtrace logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | mysql-clienttrue logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | ++ sed -e 's/mysql: //' logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 12:07:29 | haproxy/7-check-primary-failover | ++ kubectl -n kuttl-test-lucky-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SHOW VARIABLES LIKE '\''%hostname%'\'';" | mysql -sN -h 10.41.41.27 -P3306 -uroot -proot_password' logger.go:42: 12:07:30 | haproxy/7-check-primary-failover | + for i in '$(seq 0 2)' logger.go:42: 12:07:30 | haproxy/7-check-primary-failover | + data+=("$(get_primary_from_haproxy ${test_name}-haproxy-$i)") logger.go:42: 12:07:30 | haproxy/7-check-primary-failover | ++ get_primary_from_haproxy haproxy-haproxy-1 logger.go:42: 12:07:30 | haproxy/7-check-primary-failover | ++ local haproxy_pod=haproxy-haproxy-1 logger.go:42: 12:07:30 | haproxy/7-check-primary-failover | +++ kubectl -n kuttl-test-lucky-shepherd get pods haproxy-haproxy-1 -o 'jsonpath={.status.podIP}' logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | ++ local haproxy_pod_ip=10.41.42.28 logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | ++ run_mysql 'SHOW VARIABLES LIKE '\''%hostname%'\'';' '-h 10.41.42.28 -P3306 -uroot -proot_password' logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | ++ awk '{print $2}' logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | ++ local 'command=SHOW VARIABLES LIKE '\''%hostname%'\'';' logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | ++ local 'uri=-h 10.41.42.28 -P3306 -uroot -proot_password' logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | ++ local pod= logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | +++ get_client_pod logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | +++ kubectl -n kuttl-test-lucky-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | ++ client_pod=mysql-client logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | ++ wait_pod mysql-client logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | ++ local pod=mysql-client logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | ++ set +o xtrace logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | mysql-clienttrue logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | ++ kubectl -n kuttl-test-lucky-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SHOW VARIABLES LIKE '\''%hostname%'\'';" | mysql -sN -h 10.41.42.28 -P3306 -uroot -proot_password' logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | ++ sed -e 's/mysql: //' logger.go:42: 12:07:31 | haproxy/7-check-primary-failover | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 12:07:32 | haproxy/7-check-primary-failover | + for i in '$(seq 0 2)' logger.go:42: 12:07:32 | haproxy/7-check-primary-failover | + data+=("$(get_primary_from_haproxy ${test_name}-haproxy-$i)") logger.go:42: 12:07:32 | haproxy/7-check-primary-failover | ++ get_primary_from_haproxy haproxy-haproxy-2 logger.go:42: 12:07:32 | haproxy/7-check-primary-failover | ++ local haproxy_pod=haproxy-haproxy-2 logger.go:42: 12:07:32 | haproxy/7-check-primary-failover | +++ kubectl -n kuttl-test-lucky-shepherd get pods haproxy-haproxy-2 -o 'jsonpath={.status.podIP}' logger.go:42: 12:07:33 | haproxy/7-check-primary-failover | ++ local haproxy_pod_ip=10.41.40.32 logger.go:42: 12:07:33 | haproxy/7-check-primary-failover | ++ awk '{print $2}' logger.go:42: 12:07:33 | haproxy/7-check-primary-failover | ++ run_mysql 'SHOW VARIABLES LIKE '\''%hostname%'\'';' '-h 10.41.40.32 -P3306 -uroot -proot_password' logger.go:42: 12:07:33 | haproxy/7-check-primary-failover | ++ local 'command=SHOW VARIABLES LIKE '\''%hostname%'\'';' logger.go:42: 12:07:33 | haproxy/7-check-primary-failover | ++ local 'uri=-h 10.41.40.32 -P3306 -uroot -proot_password' logger.go:42: 12:07:33 | haproxy/7-check-primary-failover | ++ local pod= logger.go:42: 12:07:33 | haproxy/7-check-primary-failover | +++ get_client_pod logger.go:42: 12:07:33 | haproxy/7-check-primary-failover | +++ kubectl -n kuttl-test-lucky-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:07:33 | haproxy/7-check-primary-failover | ++ client_pod=mysql-client logger.go:42: 12:07:33 | haproxy/7-check-primary-failover | ++ wait_pod mysql-client logger.go:42: 12:07:33 | haproxy/7-check-primary-failover | ++ local pod=mysql-client logger.go:42: 12:07:33 | haproxy/7-check-primary-failover | ++ set +o xtrace logger.go:42: 12:07:34 | haproxy/7-check-primary-failover | mysql-clienttrue logger.go:42: 12:07:34 | haproxy/7-check-primary-failover | ++ kubectl -n kuttl-test-lucky-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SHOW VARIABLES LIKE '\''%hostname%'\'';" | mysql -sN -h 10.41.40.32 -P3306 -uroot -proot_password' logger.go:42: 12:07:34 | haproxy/7-check-primary-failover | ++ sed -e 's/mysql: //' logger.go:42: 12:07:34 | haproxy/7-check-primary-failover | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 12:07:35 | haproxy/7-check-primary-failover | + '[' haproxy-mysql-1 '!=' haproxy-mysql-1 -o haproxy-mysql-1 '!=' haproxy-mysql-1 ']' logger.go:42: 12:07:35 | haproxy/7-check-primary-failover | ++ get_primary_from_label logger.go:42: 12:07:35 | haproxy/7-check-primary-failover | ++ kubectl -n kuttl-test-lucky-shepherd get pods -l mysql.percona.com/primary=true '-ojsonpath={.items[0].metadata.name}' logger.go:42: 12:07:35 | haproxy/7-check-primary-failover | + primary_pod_from_label=haproxy-mysql-1 logger.go:42: 12:07:35 | haproxy/7-check-primary-failover | ++ get_primary_from_haproxy haproxy-haproxy-0 logger.go:42: 12:07:35 | haproxy/7-check-primary-failover | ++ local haproxy_pod=haproxy-haproxy-0 logger.go:42: 12:07:35 | haproxy/7-check-primary-failover | +++ kubectl -n kuttl-test-lucky-shepherd get pods haproxy-haproxy-0 -o 'jsonpath={.status.podIP}' logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | ++ local haproxy_pod_ip=10.41.41.27 logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | ++ run_mysql 'SHOW VARIABLES LIKE '\''%hostname%'\'';' '-h 10.41.41.27 -P3306 -uroot -proot_password' logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | ++ awk '{print $2}' logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | ++ local 'command=SHOW VARIABLES LIKE '\''%hostname%'\'';' logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | ++ local 'uri=-h 10.41.41.27 -P3306 -uroot -proot_password' logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | ++ local pod= logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | +++ get_client_pod logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | +++ kubectl -n kuttl-test-lucky-shepherd get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | ++ client_pod=mysql-client logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | ++ wait_pod mysql-client logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | ++ local pod=mysql-client logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | ++ set +o xtrace logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | mysql-clienttrue logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | ++ kubectl -n kuttl-test-lucky-shepherd exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SHOW VARIABLES LIKE '\''%hostname%'\'';" | mysql -sN -h 10.41.41.27 -P3306 -uroot -proot_password' logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | ++ sed -e 's/mysql: //' logger.go:42: 12:07:36 | haproxy/7-check-primary-failover | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:07:37 | haproxy/7-check-primary-failover | + primary_pod_from_haproxy=haproxy-mysql-1 logger.go:42: 12:07:37 | haproxy/7-check-primary-failover | + '[' haproxy-mysql-1 '!=' haproxy-mysql-1 ']' logger.go:42: 12:07:37 | haproxy/7-check-primary-failover | test step completed 7-check-primary-failover logger.go:42: 12:07:37 | haproxy/8-check-password-leak | starting test step 8-check-password-leak logger.go:42: 12:07:37 | haproxy/8-check-password-leak | running command: [sh -c set -o errexit set -o xtrace source ../../functions check_passwords_leak] logger.go:42: 12:07:37 | haproxy/8-check-password-leak | + source ../../functions logger.go:42: 12:07:37 | haproxy/8-check-password-leak | +++ realpath ../../.. 
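
check_passwords_leak, traced below, gathers every secret value in scope that is not TLS material (the jq filter drops *.crt, *.key, *.pub, *.pem, *.p12 and namespace entries), saves each container's log under TEMP_DIR, and then scans those files for the decoded passwords. A condensed sketch of the collection phase, reconstructed from the trace (the grep pass over the saved files is omitted):

    check_passwords_leak() {
        local secrets pods containers p c
        # Candidate passwords: every non-certificate secret value (base64).
        secrets=$(kubectl get secrets -o json | jq -r '.items[].data | to_entries | .[]
            | select(.key | (endswith(".crt") or endswith(".key") or endswith(".pub")
                or endswith(".pem") or endswith(".p12") or test("namespace")) | not) | .value')
        pods=$(kubectl -n "$NAMESPACE" get pods -o name | awk -F / '{print $2}')
        for p in $pods; do
            containers=$(kubectl -n "$NAMESPACE" get pod "$p" \
                -o 'jsonpath={.spec.containers[*].name}')
            for c in $containers; do
                # One file per container; the omitted pass greps these for $secrets.
                kubectl -n "$NAMESPACE" logs "$p" -c "$c" > "$TEMP_DIR/logs_output-$p-$c.txt"
                echo "logs saved in: $TEMP_DIR/logs_output-$p-$c.txt"
            done
        done
    }

In this run the secrets list comes back empty, so the step effectively reduces to archiving the per-container logs shown below.
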
logger.go:42: 12:07:38 | haproxy/8-check-password-leak | + check_passwords_leak logger.go:42: 12:07:38 | haproxy/8-check-password-leak | + local secrets logger.go:42: 12:07:38 | haproxy/8-check-password-leak | + local passwords logger.go:42: 12:07:38 | haproxy/8-check-password-leak | + local pods logger.go:42: 12:07:38 | haproxy/8-check-password-leak | ++ kubectl get secrets -o json logger.go:42: 12:07:38 | haproxy/8-check-password-leak | ++ jq -r '.items[].data | to_entries | .[] | select(.key | (endswith(".crt") or endswith(".key") or endswith(".pub") or endswith(".pem") or endswith(".p12") or test("namespace")) | not) | .value' logger.go:42: 12:07:38 | haproxy/8-check-password-leak | + secrets= logger.go:42: 12:07:38 | haproxy/8-check-password-leak | + passwords=' ' logger.go:42: 12:07:38 | haproxy/8-check-password-leak | ++ kubectl -n kuttl-test-lucky-shepherd get pods -o name logger.go:42: 12:07:38 | haproxy/8-check-password-leak | ++ awk -F / '{print $2}' logger.go:42: 12:07:39 | haproxy/8-check-password-leak | + pods='haproxy-haproxy-0 logger.go:42: 12:07:39 | 
haproxy/8-check-password-leak | haproxy-haproxy-1 logger.go:42: 12:07:39 | haproxy/8-check-password-leak | haproxy-haproxy-2 logger.go:42: 12:07:39 | haproxy/8-check-password-leak | haproxy-mysql-0 logger.go:42: 12:07:39 | haproxy/8-check-password-leak | haproxy-mysql-1 logger.go:42: 12:07:39 | haproxy/8-check-password-leak | haproxy-mysql-2 logger.go:42: 12:07:39 | haproxy/8-check-password-leak | haproxy-orc-0 logger.go:42: 12:07:39 | haproxy/8-check-password-leak | haproxy-orc-1 logger.go:42: 12:07:39 | haproxy/8-check-password-leak | haproxy-orc-2 logger.go:42: 12:07:39 | haproxy/8-check-password-leak | mysql-client' logger.go:42: 12:07:39 | haproxy/8-check-password-leak | + collect_logs kuttl-test-lucky-shepherd logger.go:42: 12:07:39 | haproxy/8-check-password-leak | + local containers logger.go:42: 12:07:39 | haproxy/8-check-password-leak | + local count logger.go:42: 12:07:39 | haproxy/8-check-password-leak | + NS=kuttl-test-lucky-shepherd logger.go:42: 12:07:39 | haproxy/8-check-password-leak | + for p in '$pods' logger.go:42: 12:07:39 | haproxy/8-check-password-leak | ++ kubectl -n kuttl-test-lucky-shepherd get pod haproxy-haproxy-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:07:39 | haproxy/8-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 12:07:39 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:39 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-haproxy-0 -c haproxy logger.go:42: 12:07:40 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-haproxy-0-haproxy.txt logger.go:42: 12:07:40 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-haproxy-0-haproxy.txt logger.go:42: 12:07:40 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:40 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-haproxy-0 -c mysql-monit logger.go:42: 12:07:41 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-haproxy-0-mysql-monit.txt logger.go:42: 12:07:41 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-haproxy-0-mysql-monit.txt logger.go:42: 12:07:41 | haproxy/8-check-password-leak | + echo logger.go:42: 12:07:41 | haproxy/8-check-password-leak | logger.go:42: 12:07:41 | haproxy/8-check-password-leak | + for p in '$pods' logger.go:42: 12:07:41 | haproxy/8-check-password-leak | ++ kubectl -n kuttl-test-lucky-shepherd get pod haproxy-haproxy-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:07:41 | haproxy/8-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 12:07:41 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:41 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-haproxy-1 -c haproxy logger.go:42: 12:07:42 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-haproxy-1-haproxy.txt logger.go:42: 12:07:42 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-haproxy-1-haproxy.txt logger.go:42: 12:07:42 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:42 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-haproxy-1 -c mysql-monit logger.go:42: 12:07:43 | haproxy/8-check-password-leak | + echo logs saved in: 
/tmp/kuttl/ps/haproxy/logs_output-haproxy-haproxy-1-mysql-monit.txt logger.go:42: 12:07:43 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-haproxy-1-mysql-monit.txt logger.go:42: 12:07:43 | haproxy/8-check-password-leak | + echo logger.go:42: 12:07:43 | haproxy/8-check-password-leak | logger.go:42: 12:07:43 | haproxy/8-check-password-leak | + for p in '$pods' logger.go:42: 12:07:43 | haproxy/8-check-password-leak | ++ kubectl -n kuttl-test-lucky-shepherd get pod haproxy-haproxy-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:07:43 | haproxy/8-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 12:07:43 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:43 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-haproxy-2 -c haproxy logger.go:42: 12:07:44 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-haproxy-2-haproxy.txt logger.go:42: 12:07:44 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-haproxy-2-haproxy.txt logger.go:42: 12:07:44 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:44 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-haproxy-2 -c mysql-monit logger.go:42: 12:07:44 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-haproxy-2-mysql-monit.txt logger.go:42: 12:07:44 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-haproxy-2-mysql-monit.txt logger.go:42: 12:07:44 | haproxy/8-check-password-leak | + echo logger.go:42: 12:07:44 | haproxy/8-check-password-leak | logger.go:42: 12:07:44 | haproxy/8-check-password-leak | + for p in '$pods' logger.go:42: 12:07:44 | haproxy/8-check-password-leak | ++ kubectl -n kuttl-test-lucky-shepherd get pod haproxy-mysql-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:07:45 | haproxy/8-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 12:07:45 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:45 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-mysql-0 -c mysql logger.go:42: 12:07:45 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-0-mysql.txt logger.go:42: 12:07:45 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-0-mysql.txt logger.go:42: 12:07:45 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:45 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-mysql-0 -c xtrabackup logger.go:42: 12:07:46 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-0-xtrabackup.txt logger.go:42: 12:07:46 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-0-xtrabackup.txt logger.go:42: 12:07:46 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:46 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-mysql-0 -c pt-heartbeat logger.go:42: 12:07:47 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-0-pt-heartbeat.txt logger.go:42: 12:07:47 | haproxy/8-check-password-leak | logs saved in: 
/tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-0-pt-heartbeat.txt logger.go:42: 12:07:47 | haproxy/8-check-password-leak | + echo logger.go:42: 12:07:47 | haproxy/8-check-password-leak | logger.go:42: 12:07:47 | haproxy/8-check-password-leak | + for p in '$pods' logger.go:42: 12:07:47 | haproxy/8-check-password-leak | ++ kubectl -n kuttl-test-lucky-shepherd get pod haproxy-mysql-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:07:48 | haproxy/8-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 12:07:48 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:48 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-mysql-1 -c mysql logger.go:42: 12:07:48 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-1-mysql.txt logger.go:42: 12:07:48 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-1-mysql.txt logger.go:42: 12:07:48 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:48 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-mysql-1 -c xtrabackup logger.go:42: 12:07:49 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-1-xtrabackup.txt logger.go:42: 12:07:49 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-1-xtrabackup.txt logger.go:42: 12:07:49 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:49 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-mysql-1 -c pt-heartbeat logger.go:42: 12:07:49 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-1-pt-heartbeat.txt logger.go:42: 12:07:49 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-1-pt-heartbeat.txt logger.go:42: 12:07:49 | haproxy/8-check-password-leak | + echo logger.go:42: 12:07:49 | haproxy/8-check-password-leak | logger.go:42: 12:07:49 | haproxy/8-check-password-leak | + for p in '$pods' logger.go:42: 12:07:49 | haproxy/8-check-password-leak | ++ kubectl -n kuttl-test-lucky-shepherd get pod haproxy-mysql-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:07:50 | haproxy/8-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 12:07:50 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:50 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-mysql-2 -c mysql logger.go:42: 12:07:51 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-2-mysql.txt logger.go:42: 12:07:51 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-2-mysql.txt logger.go:42: 12:07:51 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:51 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-mysql-2 -c xtrabackup logger.go:42: 12:07:51 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-2-xtrabackup.txt logger.go:42: 12:07:51 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-2-xtrabackup.txt logger.go:42: 12:07:51 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:51 | 
haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-mysql-2 -c pt-heartbeat logger.go:42: 12:07:52 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-2-pt-heartbeat.txt logger.go:42: 12:07:52 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-mysql-2-pt-heartbeat.txt logger.go:42: 12:07:52 | haproxy/8-check-password-leak | + echo logger.go:42: 12:07:52 | haproxy/8-check-password-leak | logger.go:42: 12:07:52 | haproxy/8-check-password-leak | + for p in '$pods' logger.go:42: 12:07:52 | haproxy/8-check-password-leak | ++ kubectl -n kuttl-test-lucky-shepherd get pod haproxy-orc-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:07:52 | haproxy/8-check-password-leak | + containers='orc mysql-monit' logger.go:42: 12:07:52 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:52 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-orc-0 -c orc logger.go:42: 12:07:53 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-orc-0-orc.txt logger.go:42: 12:07:53 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-orc-0-orc.txt logger.go:42: 12:07:53 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:53 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-orc-0 -c mysql-monit logger.go:42: 12:07:54 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-orc-0-mysql-monit.txt logger.go:42: 12:07:54 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-orc-0-mysql-monit.txt logger.go:42: 12:07:54 | haproxy/8-check-password-leak | + echo logger.go:42: 12:07:54 | haproxy/8-check-password-leak | logger.go:42: 12:07:54 | haproxy/8-check-password-leak | + for p in '$pods' logger.go:42: 12:07:54 | haproxy/8-check-password-leak | ++ kubectl -n kuttl-test-lucky-shepherd get pod haproxy-orc-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:07:54 | haproxy/8-check-password-leak | + containers='orc mysql-monit' logger.go:42: 12:07:54 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:54 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-orc-1 -c orc logger.go:42: 12:07:55 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-orc-1-orc.txt logger.go:42: 12:07:55 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-orc-1-orc.txt logger.go:42: 12:07:55 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:55 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-orc-1 -c mysql-monit logger.go:42: 12:07:56 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-orc-1-mysql-monit.txt logger.go:42: 12:07:56 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-orc-1-mysql-monit.txt logger.go:42: 12:07:56 | haproxy/8-check-password-leak | + echo logger.go:42: 12:07:56 | haproxy/8-check-password-leak | logger.go:42: 12:07:56 | haproxy/8-check-password-leak | + for p in '$pods' logger.go:42: 12:07:56 | haproxy/8-check-password-leak | ++ kubectl -n kuttl-test-lucky-shepherd get pod haproxy-orc-2 -o 
'jsonpath={.spec.containers[*].name}' logger.go:42: 12:07:56 | haproxy/8-check-password-leak | + containers='orc mysql-monit' logger.go:42: 12:07:56 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:56 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-orc-2 -c orc logger.go:42: 12:07:57 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-orc-2-orc.txt logger.go:42: 12:07:57 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-orc-2-orc.txt logger.go:42: 12:07:57 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:57 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs haproxy-orc-2 -c mysql-monit logger.go:42: 12:07:57 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-orc-2-mysql-monit.txt logger.go:42: 12:07:57 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-haproxy-orc-2-mysql-monit.txt logger.go:42: 12:07:57 | haproxy/8-check-password-leak | + echo logger.go:42: 12:07:57 | haproxy/8-check-password-leak | logger.go:42: 12:07:57 | haproxy/8-check-password-leak | + for p in '$pods' logger.go:42: 12:07:57 | haproxy/8-check-password-leak | ++ kubectl -n kuttl-test-lucky-shepherd get pod mysql-client -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:07:58 | haproxy/8-check-password-leak | + containers=mysql-client logger.go:42: 12:07:58 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:58 | haproxy/8-check-password-leak | + kubectl -n kuttl-test-lucky-shepherd logs mysql-client -c mysql-client logger.go:42: 12:07:58 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-mysql-client-mysql-client.txt logger.go:42: 12:07:58 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-mysql-client-mysql-client.txt logger.go:42: 12:07:58 | haproxy/8-check-password-leak | + echo logger.go:42: 12:07:58 | haproxy/8-check-password-leak | logger.go:42: 12:07:58 | haproxy/8-check-password-leak | + '[' -n ps-operator ']' logger.go:42: 12:07:58 | haproxy/8-check-password-leak | ++ kubectl -n ps-operator get pods -o name logger.go:42: 12:07:58 | haproxy/8-check-password-leak | ++ awk -F / '{print $2}' logger.go:42: 12:07:59 | haproxy/8-check-password-leak | + pods=percona-server-mysql-operator-68b5d65489-65xh5 logger.go:42: 12:07:59 | haproxy/8-check-password-leak | + collect_logs ps-operator logger.go:42: 12:07:59 | haproxy/8-check-password-leak | + local containers logger.go:42: 12:07:59 | haproxy/8-check-password-leak | + local count logger.go:42: 12:07:59 | haproxy/8-check-password-leak | + NS=ps-operator logger.go:42: 12:07:59 | haproxy/8-check-password-leak | + for p in '$pods' logger.go:42: 12:07:59 | haproxy/8-check-password-leak | ++ kubectl -n ps-operator get pod percona-server-mysql-operator-68b5d65489-65xh5 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:07:59 | haproxy/8-check-password-leak | + containers=manager logger.go:42: 12:07:59 | haproxy/8-check-password-leak | + for c in '$containers' logger.go:42: 12:07:59 | haproxy/8-check-password-leak | + kubectl -n ps-operator logs percona-server-mysql-operator-68b5d65489-65xh5 -c manager logger.go:42: 12:08:00 | haproxy/8-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/haproxy/logs_output-percona-server-mysql-operator-68b5d65489-65xh5-manager.txt 
logger.go:42: 12:08:00 | haproxy/8-check-password-leak | logs saved in: /tmp/kuttl/ps/haproxy/logs_output-percona-server-mysql-operator-68b5d65489-65xh5-manager.txt logger.go:42: 12:08:00 | haproxy/8-check-password-leak | + echo logger.go:42: 12:08:00 | haproxy/8-check-password-leak | logger.go:42: 12:08:00 | haproxy/8-check-password-leak | test step completed 8-check-password-leak logger.go:42: 12:08:00 | haproxy/9-disable-haproxy | starting test step 9-disable-haproxy logger.go:42: 12:08:00 | haproxy/9-disable-haproxy | PerconaServerMySQL:kuttl-test-lucky-shepherd/haproxy updated logger.go:42: 12:08:31 | haproxy/9-disable-haproxy | test step completed 9-disable-haproxy logger.go:42: 12:08:31 | haproxy/98-drop-finalizer | starting test step 98-drop-finalizer logger.go:42: 12:08:31 | haproxy/98-drop-finalizer | PerconaServerMySQL:kuttl-test-lucky-shepherd/haproxy updated logger.go:42: 12:08:31 | haproxy/98-drop-finalizer | test step completed 98-drop-finalizer logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | starting test step 99-remove-cluster-gracefully logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | running command: [sh -c set -o errexit set -o xtrace source ../../functions destroy_operator] logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | + source ../../functions logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ realpath ../../.. logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-913 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | ++++ pwd logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-913/e2e-tests/tests/haproxy logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | ++ test_name=haproxy logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-913/e2e-tests/vars.sh logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-913 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-913 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-913/deploy logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-913/deploy logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-913/e2e-tests logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-913/e2e-tests logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-913/e2e-tests/conf logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-913/e2e-tests/conf logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export TEMP_DIR=/tmp/kuttl/ps/haproxy logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ TEMP_DIR=/tmp/kuttl/ps/haproxy logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export GIT_BRANCH=PR-913 logger.go:42: 12:08:31 | 
haproxy/99-remove-cluster-gracefully | +++ GIT_BRANCH=PR-913 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export VERSION=PR-913-7e558330 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ VERSION=PR-913-7e558330 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-913-7e558330 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-913-7e558330 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ PMM_SERVER_VERSION=1.4.3 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export CERT_MANAGER_VER=1.17.2 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ CERT_MANAGER_VER=1.17.2 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export MINIO_VER=5.4.0 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ MINIO_VER=5.4.0 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ export CHAOS_MESH_VER=2.7.2 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ CHAOS_MESH_VER=2.7.2 logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | ++++ which gdate logger.go:42: 12:08:31 | 
haproxy/99-remove-cluster-gracefully | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-913/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | ++++ which date logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ date=/usr/bin/date logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ oc get projects logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ : logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ kubectl get nodes logger.go:42: 12:08:31 | haproxy/99-remove-cluster-gracefully | +++ grep '^minikube' logger.go:42: 12:08:32 | haproxy/99-remove-cluster-gracefully | ++ oc get projects logger.go:42: 12:08:32 | haproxy/99-remove-cluster-gracefully | + destroy_operator logger.go:42: 12:08:32 | haproxy/99-remove-cluster-gracefully | + kubectl -n ps-operator delete deployment percona-server-mysql-operator --force --grace-period=0 logger.go:42: 12:08:32 | haproxy/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 12:08:32 | haproxy/99-remove-cluster-gracefully | deployment.apps "percona-server-mysql-operator" force deleted logger.go:42: 12:08:33 | haproxy/99-remove-cluster-gracefully | + [[ -n ps-operator ]] logger.go:42: 12:08:33 | haproxy/99-remove-cluster-gracefully | + kubectl delete namespace ps-operator --force --grace-period=0 logger.go:42: 12:08:33 | haproxy/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 12:08:33 | haproxy/99-remove-cluster-gracefully | namespace "ps-operator" force deleted logger.go:42: 12:08:39 | haproxy/99-remove-cluster-gracefully | test step completed 99-remove-cluster-gracefully logger.go:42: 12:08:39 | haproxy | haproxy events from ns kuttl-test-lucky-shepherd: logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:18 +0000 UTC Normal Pod mysql-client Binding Scheduled Successfully assigned kuttl-test-lucky-shepherd/mysql-client to gke-jen-ps-913-7e558330--default-pool-f34ffbe2-3hrr default-scheduler logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:19 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Pulled Container image "percona/percona-server:8.0.33" already present on machine kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:19 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Created Created container: mysql-client kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:19 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Started Started container mysql-client kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:30 +0000 UTC Normal PersistentVolumeClaim datadir-haproxy-mysql-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:30 +0000 UTC Normal PersistentVolumeClaim datadir-haproxy-mysql-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:30 +0000 UTC Normal PersistentVolumeClaim datadir-haproxy-mysql-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-lucky-shepherd/datadir-haproxy-mysql-0" pd.csi.storage.gke.io_gke-76cff7750163472fab0a-53b2-f94c-vm_12642003-65ae-4542-9435-2f5b09778155 logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:30 +0000 UTC Normal StatefulSet.apps haproxy-mysql SuccessfulCreate create Claim datadir-haproxy-mysql-0 Pod haproxy-mysql-0 in StatefulSet haproxy-mysql success statefulset-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:30 +0000 UTC Normal StatefulSet.apps haproxy-mysql SuccessfulCreate create Pod haproxy-mysql-0 in StatefulSet haproxy-mysql successful statefulset-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:30 +0000 UTC Normal Pod haproxy-orc-0 Binding Scheduled Successfully assigned kuttl-test-lucky-shepherd/haproxy-orc-0 to gke-jen-ps-913-7e558330--default-pool-f34ffbe2-3hrr default-scheduler logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:30 +0000 UTC Normal StatefulSet.apps haproxy-orc SuccessfulCreate create Pod haproxy-orc-0 in StatefulSet haproxy-orc successful statefulset-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:31 +0000 UTC Normal Pod haproxy-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-913-7e558330" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:31 +0000 UTC Normal Pod haproxy-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-913-7e558330" in 282ms (282ms including waiting). Image size: 108786525 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:31 +0000 UTC Normal Pod haproxy-orc-0.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:31 +0000 UTC Normal Pod haproxy-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:33 +0000 UTC Normal Pod haproxy-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:33 +0000 UTC Normal Pod haproxy-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 231ms (231ms including waiting). Image size: 72477672 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:33 +0000 UTC Normal Pod haproxy-orc-0.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:33 +0000 UTC Normal Pod haproxy-orc-0.spec.containers{orc} Started Started container orc kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:33 +0000 UTC Normal Pod haproxy-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:33 +0000 UTC Normal Pod haproxy-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 193ms (193ms including waiting). Image size: 72477672 bytes. 
kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:33 +0000 UTC Normal Pod haproxy-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:33 +0000 UTC Normal Pod haproxy-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:34 +0000 UTC Normal PersistentVolumeClaim datadir-haproxy-mysql-0 ProvisioningSucceeded Successfully provisioned volume pvc-aa878fe8-1ffa-41ae-8edc-c6cd1e82cf95 pd.csi.storage.gke.io_gke-76cff7750163472fab0a-53b2-f94c-vm_12642003-65ae-4542-9435-2f5b09778155 logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:34 +0000 UTC Normal Pod haproxy-mysql-0 Binding Scheduled Successfully assigned kuttl-test-lucky-shepherd/haproxy-mysql-0 to gke-jen-ps-913-7e558330--default-pool-f34ffbe2-qff5 default-scheduler logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:42 +0000 UTC Normal Pod haproxy-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-aa878fe8-1ffa-41ae-8edc-c6cd1e82cf95" attachdetach-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:45 +0000 UTC Normal Pod haproxy-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-913-7e558330" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:46 +0000 UTC Normal Pod haproxy-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-913-7e558330" in 290ms (290ms including waiting). Image size: 108786525 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:46 +0000 UTC Normal Pod haproxy-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:46 +0000 UTC Normal Pod haproxy-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:47 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:47 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 248ms (248ms including waiting). Image size: 436542574 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:47 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:47 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:47 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:48 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 275ms (275ms including waiting). Image size: 445622672 bytes. 
kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:48 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:48 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:48 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:48 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 269ms (269ms including waiting). Image size: 132952447 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:48 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:03:48 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:06 +0000 UTC Normal Pod haproxy-orc-1 Binding Scheduled Successfully assigned kuttl-test-lucky-shepherd/haproxy-orc-1 to gke-jen-ps-913-7e558330--default-pool-f34ffbe2-qff5 default-scheduler logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:06 +0000 UTC Normal StatefulSet.apps haproxy-orc SuccessfulCreate create Pod haproxy-orc-1 in StatefulSet haproxy-orc successful statefulset-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:07 +0000 UTC Warning Pod haproxy-orc-1 FailedMount MountVolume.SetUp failed for volume "custom" : failed to sync configmap cache: timed out waiting for the condition kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:08 +0000 UTC Normal Pod haproxy-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-913-7e558330" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:08 +0000 UTC Normal Pod haproxy-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-913-7e558330" in 264ms (264ms including waiting). Image size: 108786525 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:08 +0000 UTC Normal Pod haproxy-orc-1.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:08 +0000 UTC Normal Pod haproxy-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:10 +0000 UTC Normal Pod haproxy-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:10 +0000 UTC Normal Pod haproxy-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 244ms (244ms including waiting). Image size: 72477672 bytes. 
kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:10 +0000 UTC Normal Pod haproxy-orc-1.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:11 +0000 UTC Normal Pod haproxy-orc-1.spec.containers{orc} Started Started container orc kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:11 +0000 UTC Normal Pod haproxy-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:11 +0000 UTC Normal Pod haproxy-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 249ms (249ms including waiting). Image size: 72477672 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:11 +0000 UTC Normal Pod haproxy-orc-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:11 +0000 UTC Normal Pod haproxy-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:20 +0000 UTC Normal PersistentVolumeClaim datadir-haproxy-mysql-1 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:20 +0000 UTC Normal PersistentVolumeClaim datadir-haproxy-mysql-1 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:20 +0000 UTC Normal PersistentVolumeClaim datadir-haproxy-mysql-1 Provisioning External provisioner is provisioning volume for claim "kuttl-test-lucky-shepherd/datadir-haproxy-mysql-1" pd.csi.storage.gke.io_gke-76cff7750163472fab0a-53b2-f94c-vm_12642003-65ae-4542-9435-2f5b09778155 logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:20 +0000 UTC Normal StatefulSet.apps haproxy-mysql SuccessfulCreate create Claim datadir-haproxy-mysql-1 Pod haproxy-mysql-1 in StatefulSet haproxy-mysql success statefulset-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:20 +0000 UTC Normal StatefulSet.apps haproxy-mysql SuccessfulCreate create Pod haproxy-mysql-1 in StatefulSet haproxy-mysql successful statefulset-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:23 +0000 UTC Normal Pod haproxy-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-lucky-shepherd/haproxy-haproxy-0 to gke-jen-ps-913-7e558330--default-pool-f34ffbe2-3hrr default-scheduler logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:23 +0000 UTC Normal StatefulSet.apps haproxy-haproxy SuccessfulCreate create Pod haproxy-haproxy-0 in StatefulSet haproxy-haproxy successful statefulset-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:24 +0000 UTC Normal PersistentVolumeClaim datadir-haproxy-mysql-1 ProvisioningSucceeded Successfully provisioned volume pvc-23ae751a-2ad4-4c23-934b-2179bba750ec pd.csi.storage.gke.io_gke-76cff7750163472fab0a-53b2-f94c-vm_12642003-65ae-4542-9435-2f5b09778155 logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:24 +0000 UTC Normal Pod haproxy-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-913-7e558330" kubelet logger.go:42: 
12:08:39 | haproxy | 2025-05-28 12:04:24 +0000 UTC Normal Pod haproxy-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-913-7e558330" in 211ms (211ms including waiting). Image size: 108786525 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:24 +0000 UTC Normal Pod haproxy-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:24 +0000 UTC Normal Pod haproxy-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:24 +0000 UTC Normal Pod haproxy-mysql-1 Binding Scheduled Successfully assigned kuttl-test-lucky-shepherd/haproxy-mysql-1 to gke-jen-ps-913-7e558330--default-pool-f34ffbe2-3hrr default-scheduler logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:26 +0000 UTC Normal Pod haproxy-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:26 +0000 UTC Normal Pod haproxy-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 199ms (199ms including waiting). Image size: 102736162 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:26 +0000 UTC Normal Pod haproxy-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:26 +0000 UTC Normal Pod haproxy-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:26 +0000 UTC Normal Pod haproxy-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:26 +0000 UTC Normal Pod haproxy-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 256ms (256ms including waiting). Image size: 102736162 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:26 +0000 UTC Normal Pod haproxy-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:27 +0000 UTC Normal Pod haproxy-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:27 +0000 UTC Normal Pod haproxy-haproxy-1 Binding Scheduled Successfully assigned kuttl-test-lucky-shepherd/haproxy-haproxy-1 to gke-jen-ps-913-7e558330--default-pool-f34ffbe2-qff5 default-scheduler logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:27 +0000 UTC Normal StatefulSet.apps haproxy-haproxy SuccessfulCreate create Pod haproxy-haproxy-1 in StatefulSet haproxy-haproxy successful statefulset-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:28 +0000 UTC Normal Pod haproxy-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-913-7e558330" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:28 +0000 UTC Normal Pod haproxy-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-913-7e558330" in 271ms (271ms including waiting). Image size: 108786525 bytes. 
kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:28 +0000 UTC Normal Pod haproxy-haproxy-1.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:28 +0000 UTC Normal Pod haproxy-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:30 +0000 UTC Normal Pod haproxy-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:30 +0000 UTC Normal Pod haproxy-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 178ms (178ms including waiting). Image size: 102736162 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:30 +0000 UTC Normal Pod haproxy-haproxy-1.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:31 +0000 UTC Normal Pod haproxy-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:31 +0000 UTC Normal Pod haproxy-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:31 +0000 UTC Normal Pod haproxy-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 261ms (261ms including waiting). Image size: 102736162 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:31 +0000 UTC Normal Pod haproxy-haproxy-1.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:31 +0000 UTC Normal Pod haproxy-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:31 +0000 UTC Normal Pod haproxy-haproxy-2 Binding Scheduled Successfully assigned kuttl-test-lucky-shepherd/haproxy-haproxy-2 to gke-jen-ps-913-7e558330--default-pool-f34ffbe2-rg7s default-scheduler logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:31 +0000 UTC Normal StatefulSet.apps haproxy-haproxy SuccessfulCreate create Pod haproxy-haproxy-2 in StatefulSet haproxy-haproxy successful statefulset-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:32 +0000 UTC Normal Pod haproxy-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-913-7e558330" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:32 +0000 UTC Normal Pod haproxy-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-913-7e558330" in 259ms (259ms including waiting). Image size: 108786525 bytes. 
kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:32 +0000 UTC Normal Pod haproxy-haproxy-2.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:32 +0000 UTC Normal Pod haproxy-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-23ae751a-2ad4-4c23-934b-2179bba750ec" attachdetach-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:33 +0000 UTC Normal Pod haproxy-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:33 +0000 UTC Normal Pod haproxy-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-913-7e558330" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:34 +0000 UTC Normal Pod haproxy-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:34 +0000 UTC Normal Pod haproxy-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-913-7e558330" in 314ms (314ms including waiting). Image size: 108786525 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:34 +0000 UTC Normal Pod haproxy-mysql-1.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:34 +0000 UTC Normal Pod haproxy-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:35 +0000 UTC Normal Pod haproxy-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 234ms (234ms including waiting). Image size: 102736162 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:35 +0000 UTC Normal Pod haproxy-haproxy-2.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:35 +0000 UTC Normal Pod haproxy-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:35 +0000 UTC Normal Pod haproxy-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:35 +0000 UTC Normal Pod haproxy-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 230ms (230ms including waiting). Image size: 102736162 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:35 +0000 UTC Normal Pod haproxy-haproxy-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:35 +0000 UTC Normal Pod haproxy-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:36 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:36 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 249ms (249ms including waiting). Image size: 436542574 bytes. 
kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:36 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:36 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:36 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:36 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 243ms (243ms including waiting). Image size: 445622672 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:37 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:37 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:37 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:37 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 239ms (239ms including waiting). Image size: 132952447 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:37 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:37 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:43 +0000 UTC Normal Pod haproxy-orc-2 Binding Scheduled Successfully assigned kuttl-test-lucky-shepherd/haproxy-orc-2 to gke-jen-ps-913-7e558330--default-pool-f34ffbe2-rg7s default-scheduler logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:43 +0000 UTC Normal Pod haproxy-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-913-7e558330" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:43 +0000 UTC Normal StatefulSet.apps haproxy-orc SuccessfulCreate create Pod haproxy-orc-2 in StatefulSet haproxy-orc successful statefulset-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:44 +0000 UTC Normal Pod haproxy-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-913-7e558330" in 261ms (261ms including waiting). Image size: 108786525 bytes. 
kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:44 +0000 UTC Normal Pod haproxy-orc-2.spec.initContainers{orc-init} Created Created container: orc-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:44 +0000 UTC Normal Pod haproxy-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:45 +0000 UTC Normal Pod haproxy-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:46 +0000 UTC Normal Pod haproxy-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 184ms (184ms including waiting). Image size: 72477672 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:46 +0000 UTC Normal Pod haproxy-orc-2.spec.containers{orc} Created Created container: orc kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:46 +0000 UTC Normal Pod haproxy-orc-2.spec.containers{orc} Started Started container orc kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:46 +0000 UTC Normal Pod haproxy-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:46 +0000 UTC Normal Pod haproxy-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 247ms (247ms including waiting). Image size: 72477672 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:46 +0000 UTC Normal Pod haproxy-orc-2.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:46 +0000 UTC Normal Pod haproxy-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:54 +0000 UTC Warning Pod haproxy-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/28 12:04:53 Waiting for MySQL ready state 2025/05/28 12:04:53 MySQL is ready 2025/05/28 12:04:53 Peers: [3164336539653264.haproxy-mysql-unready.kuttl-test-lucky-shepherd 3538333135326665.haproxy-mysql-unready.kuttl-test-lucky-shepherd] 2025/05/28 12:04:53 FQDN: haproxy-mysql-1.haproxy-mysql.kuttl-test-lucky-shepherd 2025/05/28 12:04:53 Primary: haproxy-mysql-0.haproxy-mysql.kuttl-test-lucky-shepherd Replicas: [haproxy-mysql-1.haproxy-mysql.kuttl-test-lucky-shepherd] 2025/05/28 12:04:53 lookup haproxy-mysql-1 [10.41.41.28] 2025/05/28 12:04:53 PodIP: 10.41.41.28 2025/05/28 12:04:53 lookup haproxy-mysql-0.haproxy-mysql.kuttl-test-lucky-shepherd [10.41.42.26] 2025/05/28 12:04:53 PrimaryIP: 10.41.42.26 2025/05/28 12:04:53 Donor: haproxy-mysql-0.haproxy-mysql.kuttl-test-lucky-shepherd 2025/05/28 12:04:53 Opening connection to 10.41.41.28 2025/05/28 12:04:53 Clone required: true 2025/05/28 12:04:53 Checking if a clone in progress 2025/05/28 12:04:53 Clone in progress: false 2025/05/28 12:04:53 Cloning from haproxy-mysql-0.haproxy-mysql.kuttl-test-lucky-shepherd 2025/05/28 12:04:54 Clone finished. Restarting container... 
kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:54 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:04:58 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 243ms (244ms including waiting). Image size: 436542574 bytes. kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:28 +0000 UTC Normal PersistentVolumeClaim datadir-haproxy-mysql-2 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:28 +0000 UTC Normal PersistentVolumeClaim datadir-haproxy-mysql-2 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:28 +0000 UTC Normal PersistentVolumeClaim datadir-haproxy-mysql-2 Provisioning External provisioner is provisioning volume for claim "kuttl-test-lucky-shepherd/datadir-haproxy-mysql-2" pd.csi.storage.gke.io_gke-76cff7750163472fab0a-53b2-f94c-vm_12642003-65ae-4542-9435-2f5b09778155 logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:28 +0000 UTC Normal StatefulSet.apps haproxy-mysql SuccessfulCreate create Claim datadir-haproxy-mysql-2 Pod haproxy-mysql-2 in StatefulSet haproxy-mysql success statefulset-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:28 +0000 UTC Normal StatefulSet.apps haproxy-mysql SuccessfulCreate create Pod haproxy-mysql-2 in StatefulSet haproxy-mysql successful statefulset-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:32 +0000 UTC Normal PersistentVolumeClaim datadir-haproxy-mysql-2 ProvisioningSucceeded Successfully provisioned volume pvc-d0c1007a-5efe-474e-9f59-0d2df3a1446b pd.csi.storage.gke.io_gke-76cff7750163472fab0a-53b2-f94c-vm_12642003-65ae-4542-9435-2f5b09778155 logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:32 +0000 UTC Normal Pod haproxy-mysql-2 Binding Scheduled Successfully assigned kuttl-test-lucky-shepherd/haproxy-mysql-2 to gke-jen-ps-913-7e558330--default-pool-f34ffbe2-rg7s default-scheduler logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:40 +0000 UTC Normal Pod haproxy-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d0c1007a-5efe-474e-9f59-0d2df3a1446b" attachdetach-controller logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:41 +0000 UTC Normal Pod haproxy-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-913-7e558330" kubelet logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:42 +0000 UTC Normal Pod haproxy-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-913-7e558330" in 232ms (232ms including waiting). Image size: 108786525 bytes. 
kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:42 +0000 UTC Normal Pod haproxy-mysql-2.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:42 +0000 UTC Normal Pod haproxy-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:44 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:44 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 174ms (174ms including waiting). Image size: 436542574 bytes. kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:44 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:44 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:44 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:44 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 245ms (245ms including waiting). Image size: 445622672 bytes. kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:44 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:44 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:44 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:44 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 217ms (217ms including waiting). Image size: 132952447 bytes. kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:45 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:05:45 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:06:02 +0000 UTC Warning Pod haproxy-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/28 12:06:01 Waiting for MySQL ready state 2025/05/28 12:06:01 MySQL is ready 2025/05/28 12:06:01 Peers: [3164336539653264.haproxy-mysql-unready.kuttl-test-lucky-shepherd 3365613239643065.haproxy-mysql-unready.kuttl-test-lucky-shepherd 3538333135326665.haproxy-mysql-unready.kuttl-test-lucky-shepherd] 2025/05/28 12:06:01 FQDN: haproxy-mysql-2.haproxy-mysql.kuttl-test-lucky-shepherd 2025/05/28 12:06:01 Primary: haproxy-mysql-0.haproxy-mysql.kuttl-test-lucky-shepherd Replicas: [haproxy-mysql-1.haproxy-mysql.kuttl-test-lucky-shepherd haproxy-mysql-2.haproxy-mysql.kuttl-test-lucky-shepherd] 2025/05/28 12:06:01 lookup haproxy-mysql-2 [10.41.40.34] 2025/05/28 12:06:01 PodIP: 10.41.40.34 2025/05/28 12:06:01 lookup haproxy-mysql-0.haproxy-mysql.kuttl-test-lucky-shepherd [10.41.42.26] 2025/05/28 12:06:01 PrimaryIP: 10.41.42.26 2025/05/28 12:06:01 Donor: haproxy-mysql-1.haproxy-mysql.kuttl-test-lucky-shepherd 2025/05/28 12:06:01 Opening connection to 10.41.40.34 2025/05/28 12:06:01 Clone required: true 2025/05/28 12:06:01 Checking if a clone in progress 2025/05/28 12:06:01 Clone in progress: false 2025/05/28 12:06:01 Cloning from haproxy-mysql-1.haproxy-mysql.kuttl-test-lucky-shepherd 2025/05/28 12:06:02 Clone finished. Restarting container... kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:06:02 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:06:06 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 226ms (226ms including waiting). Image size: 436542574 bytes. kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:05 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:05 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:05 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:05 +0000 UTC Warning Pod haproxy-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/05/28 12:07:05 MySQL state is not ready... kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:10 +0000 UTC Warning Pod haproxy-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/05/28 12:07:10 MySQL state is not ready... kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:15 +0000 UTC Warning Pod haproxy-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/05/28 12:07:15 MySQL state is not ready... kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:20 +0000 UTC Warning Pod haproxy-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:25 +0000 UTC Normal Pod haproxy-mysql-0 Binding Scheduled Successfully assigned kuttl-test-lucky-shepherd/haproxy-mysql-0 to gke-jen-ps-913-7e558330--default-pool-f34ffbe2-qff5 default-scheduler
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:27 +0000 UTC Normal Pod haproxy-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-913-7e558330" kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:27 +0000 UTC Normal Pod haproxy-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-913-7e558330" in 290ms (290ms including waiting). Image size: 108786525 bytes. kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:27 +0000 UTC Normal Pod haproxy-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:27 +0000 UTC Normal Pod haproxy-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:29 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:29 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 239ms (239ms including waiting). Image size: 436542574 bytes. kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:29 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:29 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:29 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:29 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 208ms (208ms including waiting). Image size: 445622672 bytes. kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:29 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:29 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:29 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:30 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 230ms (230ms including waiting). Image size: 132952447 bytes. kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:30 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:30 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:48 +0000 UTC Warning Pod haproxy-mysql-0.spec.containers{mysql} Unhealthy Startup probe failed: 2025/05/28 12:07:46 Waiting for MySQL ready state 2025/05/28 12:07:46 MySQL is ready 2025/05/28 12:07:46 Peers: [3133663866666132.haproxy-mysql-unready.kuttl-test-lucky-shepherd 3365613239643065.haproxy-mysql-unready.kuttl-test-lucky-shepherd 3538333135326665.haproxy-mysql-unready.kuttl-test-lucky-shepherd] 2025/05/28 12:07:46 FQDN: haproxy-mysql-0.haproxy-mysql.kuttl-test-lucky-shepherd 2025/05/28 12:07:46 Primary: haproxy-mysql-1.haproxy-mysql.kuttl-test-lucky-shepherd Replicas: [haproxy-mysql-0.haproxy-mysql.kuttl-test-lucky-shepherd haproxy-mysql-2.haproxy-mysql.kuttl-test-lucky-shepherd] 2025/05/28 12:07:46 lookup haproxy-mysql-0 [10.41.42.29] 2025/05/28 12:07:46 PodIP: 10.41.42.29 2025/05/28 12:07:46 lookup haproxy-mysql-1.haproxy-mysql.kuttl-test-lucky-shepherd [10.41.41.28] 2025/05/28 12:07:46 PrimaryIP: 10.41.41.28 2025/05/28 12:07:47 Donor: haproxy-mysql-2.haproxy-mysql.kuttl-test-lucky-shepherd 2025/05/28 12:07:47 Opening connection to 10.41.42.29 2025/05/28 12:07:47 Clone required: true 2025/05/28 12:07:47 Checking if a clone in progress 2025/05/28 12:07:47 Clone in progress: false 2025/05/28 12:07:47 Cloning from haproxy-mysql-2.haproxy-mysql.kuttl-test-lucky-shepherd 2025/05/28 12:07:48 Clone finished. Restarting container... kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:48 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:07:52 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 232ms (232ms including waiting). Image size: 436542574 bytes. kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:01 +0000 UTC Normal Pod haproxy-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:01 +0000 UTC Normal Pod haproxy-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:01 +0000 UTC Normal Pod haproxy-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:01 +0000 UTC Normal Pod haproxy-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:01 +0000 UTC Normal Pod haproxy-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:01 +0000 UTC Normal Pod haproxy-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:32 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:32 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:32 +0000 UTC Normal Pod haproxy-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:32 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:32 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:32 +0000 UTC Normal Pod haproxy-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:32 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:32 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:32 +0000 UTC Normal Pod haproxy-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:32 +0000 UTC Normal Pod haproxy-orc-0.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:32 +0000 UTC Normal Pod haproxy-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:32 +0000 UTC Normal Pod haproxy-orc-1.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:32 +0000 UTC Normal Pod haproxy-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:32 +0000 UTC Normal Pod haproxy-orc-2.spec.containers{orc} Killing Stopping container orc kubelet
logger.go:42: 12:08:39 | haproxy | 2025-05-28 12:08:32 +0000 UTC Normal Pod haproxy-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 12:08:39 | haproxy | Deleting namespace: kuttl-test-lucky-shepherd
=== NAME kuttl
harness.go:407: run tests finished
harness.go:515: cleaning up
harness.go:572: removing temp folder: ""
--- PASS: kuttl (377.51s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/haproxy (377.07s)
PASS