=== RUN   kuttl
harness.go:459: starting setup
harness.go:254: running tests using configured kubeconfig.
harness.go:277: Successful connection to cluster at: https://34.41.167.132
harness.go:362: running tests
harness.go:74: going to run test suite with timeout of 180 seconds for each step
harness.go:374: testsuite: e2e-tests/tests has 46 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/one-pod
=== PAUSE kuttl/harness/one-pod
=== CONT  kuttl/harness/one-pod
logger.go:42: 02:13:52 | one-pod | Creating namespace: kuttl-test-meet-gnat
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | starting test step 0-deploy-operator
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
init_temp_dir # do this only in the first TestStep
apply_s3_storage_secrets
deploy_operator
deploy_tls_cluster_secrets
deploy_client
deploy_minio]
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | + source ../../functions
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ realpath ../../..
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | ++++ pwd
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/tests/one-pod
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | ++ test_name=one-pod
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/vars.sh
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/deploy
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/deploy
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/conf
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/conf
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export TEMP_DIR=/tmp/kuttl/ps/one-pod
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ TEMP_DIR=/tmp/kuttl/ps/one-pod
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | ++++ git rev-parse --abbrev-ref HEAD
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export GIT_BRANCH=PR-1164
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ GIT_BRANCH=PR-1164
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export VERSION=PR-1164-9bd7f24f
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ VERSION=PR-1164-9bd7f24f
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ [[ -z 8.0 ]]
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export MYSQL_VERSION=8.0
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ MYSQL_VERSION=8.0
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export PMM_SERVER_VERSION=1.4.3
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ PMM_SERVER_VERSION=1.4.3
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export CERT_MANAGER_VER=1.18.2
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ CERT_MANAGER_VER=1.18.2
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export MINIO_VER=5.4.0
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ MINIO_VER=5.4.0
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export CHAOS_MESH_VER=2.7.2
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ CHAOS_MESH_VER=2.7.2
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ export VAULT_VER=0.16.1
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ VAULT_VER=0.16.1
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | ++++ which gdate
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | ++++ which date
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ date=/usr/sbin/date
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ oc get projects
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ :
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ kubectl get nodes
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ grep '^minikube'
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ which gsed
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | which: no gsed in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ which sed
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | ++ sed=/usr/sbin/sed
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | ++ oc get projects
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ kubectl version -o json
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ jq -r .serverVersion.gitVersion
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | +++ grep '\-eks\-'
logger.go:42: 02:13:53 | one-pod/0-deploy-operator | grep: warning: stray \ before -
logger.go:42: 02:13:54 | one-pod/0-deploy-operator | Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
logger.go:42: 02:13:54 | one-pod/0-deploy-operator | ++ '[' ']'
logger.go:42: 02:13:54 | one-pod/0-deploy-operator | ++ EKS=0
logger.go:42: 02:13:54 | one-pod/0-deploy-operator | + init_temp_dir
logger.go:42: 02:13:54 | one-pod/0-deploy-operator | + rm -rf /tmp/kuttl/ps/one-pod
logger.go:42: 02:13:54 | one-pod/0-deploy-operator | + mkdir -p /tmp/kuttl/ps/one-pod
logger.go:42: 02:13:54 | one-pod/0-deploy-operator | + apply_s3_storage_secrets
logger.go:42: 02:13:54 | one-pod/0-deploy-operator | + apply_minio_secret
logger.go:42: 02:13:54 | one-pod/0-deploy-operator | + kubectl -n kuttl-test-meet-gnat apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/conf/minio-secret.yml
logger.go:42: 02:13:55 | one-pod/0-deploy-operator | secret/minio-secret created
logger.go:42: 02:13:55 | one-pod/0-deploy-operator | + kubectl -n kuttl-test-meet-gnat apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/conf/cloud-secret.yml
logger.go:42: 02:13:56 | one-pod/0-deploy-operator | secret/aws-s3-secret created
logger.go:42: 02:13:56 | one-pod/0-deploy-operator | secret/gcp-cs-secret created
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | secret/azure-secret created
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | + deploy_operator
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | + destroy_operator
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | + kubectl -n ps-operator delete deployment percona-server-mysql-operator --force --grace-period=0
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | Error from server (NotFound): deployments.apps "percona-server-mysql-operator" not found
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | + true
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | + [[ -n ps-operator ]]
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | + kubectl delete namespace ps-operator --force --grace-period=0
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | Error from server (NotFound): namespaces "ps-operator" not found
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | + true
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | + [[ -n ps-operator ]]
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | + create_namespace ps-operator
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | + local namespace=ps-operator
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | + [[ -n '' ]]
logger.go:42: 02:13:57 | one-pod/0-deploy-operator | + kubectl delete namespace ps-operator --ignore-not-found
logger.go:42: 02:13:58 | one-pod/0-deploy-operator | + kubectl wait --for=delete namespace ps-operator
logger.go:42: 02:13:58 | one-pod/0-deploy-operator | + kubectl create namespace ps-operator
logger.go:42: 02:13:59 | one-pod/0-deploy-operator | namespace/ps-operator created
logger.go:42: 02:13:59 | one-pod/0-deploy-operator | + apply_crd
logger.go:42: 02:13:59 | one-pod/0-deploy-operator | + kubectl -n ps-operator apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-ps-operator_PR-1164/deploy/crd.yaml
logger.go:42: 02:14:00 | one-pod/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlbackups.ps.percona.com serverside-applied
logger.go:42: 02:14:00 | one-pod/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlrestores.ps.percona.com serverside-applied
logger.go:42: 02:14:02 | one-pod/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqls.ps.percona.com serverside-applied
logger.go:42: 02:14:02 | one-pod/0-deploy-operator | + apply_rbac
logger.go:42: 02:14:02 | one-pod/0-deploy-operator | + local rbac_file
logger.go:42: 02:14:02 | one-pod/0-deploy-operator | + '[' -n ps-operator ']'
logger.go:42: 02:14:02 | one-pod/0-deploy-operator | + rbac_file=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/deploy/cw-rbac.yaml
logger.go:42: 02:14:02 | one-pod/0-deploy-operator | + kubectl -n ps-operator apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-1164/deploy/cw-rbac.yaml
logger.go:42: 02:14:03 | one-pod/0-deploy-operator | serviceaccount/percona-server-mysql-operator created
logger.go:42: 02:14:03 | one-pod/0-deploy-operator | role.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created
logger.go:42: 02:14:03 | one-pod/0-deploy-operator | clusterrole.rbac.authorization.k8s.io/percona-server-mysql-operator unchanged
logger.go:42: 02:14:04 | one-pod/0-deploy-operator | rolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created
logger.go:42: 02:14:04 | one-pod/0-deploy-operator | clusterrolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator unchanged
logger.go:42: 02:14:04 | one-pod/0-deploy-operator | + local operator_file
logger.go:42: 02:14:04 | one-pod/0-deploy-operator | + '[' -n ps-operator ']'
logger.go:42: 02:14:04 | one-pod/0-deploy-operator | + operator_file=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/deploy/cw-operator.yaml
logger.go:42: 02:14:04 | one-pod/0-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="DISABLE_TELEMETRY").value) = "true"'
logger.go:42: 02:14:04 | one-pod/0-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="LOG_LEVEL").value) = "VERBOSE"'
logger.go:42: 02:14:04 | one-pod/0-deploy-operator | ++ printf 'select(documentIndex==1).spec.template.spec.containers[0].image="%s"' perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f
logger.go:42: 02:14:04 | one-pod/0-deploy-operator | + kubectl -n ps-operator apply -f -
logger.go:42: 02:14:04 | one-pod/0-deploy-operator | + yq eval 'select(documentIndex==1).spec.template.spec.containers[0].image="perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f"' /mnt/jenkins/workspace/cloud-ps-operator_PR-1164/deploy/cw-operator.yaml
logger.go:42: 02:14:05 | one-pod/0-deploy-operator | configmap/percona-server-mysql-operator-config created
logger.go:42: 02:14:06 | one-pod/0-deploy-operator | deployment.apps/percona-server-mysql-operator created
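For reference, the operator rollout traced above reduces to a short kubectl/yq sequence. This is only a condensed sketch of what the deploy_operator helper appears to do, with the namespace, image tag, and file paths taken from the log (the LOG_LEVEL=VERBOSE patch is applied the same way as the telemetry one); it is not the helper itself:

    # sketch: cluster-wide operator install, run from the PR checkout root
    kubectl -n ps-operator apply --server-side --force-conflicts -f deploy/crd.yaml
    kubectl -n ps-operator apply -f deploy/cw-rbac.yaml
    yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="DISABLE_TELEMETRY").value) = "true"' deploy/cw-operator.yaml \
      | yq eval 'select(documentIndex==1).spec.template.spec.containers[0].image="perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f"' - \
      | kubectl -n ps-operator apply -f -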
logger.go:42: 02:14:06 | one-pod/0-deploy-operator | + deploy_tls_cluster_secrets
logger.go:42: 02:14:06 | one-pod/0-deploy-operator | + kubectl -n kuttl-test-meet-gnat apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/conf/ssl-secret.yaml
logger.go:42: 02:14:07 | one-pod/0-deploy-operator | secret/test-ssl created
logger.go:42: 02:14:07 | one-pod/0-deploy-operator | + deploy_client
logger.go:42: 02:14:07 | one-pod/0-deploy-operator | + kubectl -n kuttl-test-meet-gnat apply -f -
logger.go:42: 02:14:07 | one-pod/0-deploy-operator | ++ printf '.spec.containers[0].image="%s"' perconalab/percona-server-mysql-operator:main-psmysql8.0
logger.go:42: 02:14:07 | one-pod/0-deploy-operator | + yq eval '.spec.containers[0].image="perconalab/percona-server-mysql-operator:main-psmysql8.0"' /mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/conf/client.yaml
logger.go:42: 02:14:08 | one-pod/0-deploy-operator | pod/mysql-client created
logger.go:42: 02:14:08 | one-pod/0-deploy-operator | + deploy_minio
logger.go:42: 02:14:08 | one-pod/0-deploy-operator | + local storage=2G
logger.go:42: 02:14:08 | one-pod/0-deploy-operator | + local access_key
logger.go:42: 02:14:08 | one-pod/0-deploy-operator | + local secret_key
logger.go:42: 02:14:08 | one-pod/0-deploy-operator | ++ kubectl -n kuttl-test-meet-gnat get secret minio-secret -o 'jsonpath={.data.AWS_ACCESS_KEY_ID}'
logger.go:42: 02:14:08 | one-pod/0-deploy-operator | ++ base64 -d
logger.go:42: 02:14:08 | one-pod/0-deploy-operator | + access_key=some-access-key
logger.go:42: 02:14:08 | one-pod/0-deploy-operator | ++ kubectl -n kuttl-test-meet-gnat get secret minio-secret -o 'jsonpath={.data.AWS_SECRET_ACCESS_KEY}'
logger.go:42: 02:14:08 | one-pod/0-deploy-operator | ++ base64 -d
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | + secret_key=some-secret-key
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | + helm uninstall -n kuttl-test-meet-gnat minio-service
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | Error: uninstall: Release not loaded: minio-service: release: not found
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | + :
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | + helm repo remove minio
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | Error: no repositories configured
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | + :
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | + helm repo add minio https://charts.min.io/
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | "minio" has been added to your repositories
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | +++ printf %q some-access-key
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | ++ printf %q some-access-key
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | +++ printf %q some-secret-key
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | ++ printf %q some-secret-key
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | + retry 10 60 helm install minio-service -n kuttl-test-meet-gnat --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | + local max=10
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | + local delay=60
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | + shift 2
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | + local n=1
logger.go:42: 02:14:09 | one-pod/0-deploy-operator | + helm install minio-service -n kuttl-test-meet-gnat --version 5.4.0 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | NAME: minio-service
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | LAST DEPLOYED: Wed Nov 12 02:14:10 2025
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | NAMESPACE: kuttl-test-meet-gnat
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | STATUS: deployed
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | REVISION: 1
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | TEST SUITE: None
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | NOTES:
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | MinIO can be accessed via port 9000 on the following DNS name from within your cluster:
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | minio-service.kuttl-test-meet-gnat.cluster.local
logger.go:42: 02:14:43 | one-pod/0-deploy-operator |
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | To access MinIO from localhost, run the below commands:
logger.go:42: 02:14:43 | one-pod/0-deploy-operator |
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | 1. export POD_NAME=$(kubectl get pods --namespace kuttl-test-meet-gnat -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}")
logger.go:42: 02:14:43 | one-pod/0-deploy-operator |
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | 2. kubectl port-forward $POD_NAME 9000 --namespace kuttl-test-meet-gnat
logger.go:42: 02:14:43 | one-pod/0-deploy-operator |
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/
logger.go:42: 02:14:43 | one-pod/0-deploy-operator |
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client:
logger.go:42: 02:14:43 | one-pod/0-deploy-operator |
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart
logger.go:42: 02:14:43 | one-pod/0-deploy-operator |
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace kuttl-test-meet-gnat minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace kuttl-test-meet-gnat minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000
logger.go:42: 02:14:43 | one-pod/0-deploy-operator |
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | 3. mc ls minio-service-local
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | ++ kubectl -n kuttl-test-meet-gnat get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | + MINIO_POD=minio-service-d9589b474-vzwxq
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | + wait_pod minio-service-d9589b474-vzwxq
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | + local pod=minio-service-d9589b474-vzwxq
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | + local ns=kuttl-test-meet-gnat
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | + set +o xtrace
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | minio-service-d9589b474-vzwxqtrue
logger.go:42: 02:14:43 | one-pod/0-deploy-operator | + kubectl -n kuttl-test-meet-gnat run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID='\''some-access-key'\'' AWS_SECRET_ACCESS_KEY='\''some-secret-key'\'' AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'
logger.go:42: 02:14:47 | one-pod/0-deploy-operator | All commands and output from this session will be recorded in container logs, including credentials and sensitive information passed through the command prompt.
logger.go:42: 02:14:47 | one-pod/0-deploy-operator | If you don't see a command prompt, try pressing enter.
logger.go:42: 02:14:49 | one-pod/0-deploy-operator | pod "aws-cli" deleted from kuttl-test-meet-gnat namespace
logger.go:42: 02:14:50 | one-pod/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1]
logger.go:42: 02:14:50 | one-pod/0-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist.
logger.go:42: 02:14:50 | one-pod/0-deploy-operator | INFO Found 1 resource(s).
logger.go:42: 02:14:50 | one-pod/0-deploy-operator | NAME NAMESPACE COL0
logger.go:42: 02:14:50 | one-pod/0-deploy-operator | percona-server-mysql-operator ps-operator 1
logger.go:42: 02:14:50 | one-pod/0-deploy-operator | ASSERT PASS
logger.go:42: 02:14:50 | one-pod/0-deploy-operator | test step completed 0-deploy-operator
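The MinIO backing store used for backups in this suite can be reproduced outside the harness with the same chart values the deploy_minio helper passes above; a minimal sketch (namespace and test credentials exactly as shown in the log, not production values):

    helm repo add minio https://charts.min.io/
    helm install minio-service -n kuttl-test-meet-gnat --version 5.4.0 \
      --set replicas=1 --set mode=standalone --set persistence.size=2G \
      --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' \
      --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP \
      minio/minio
    # create the bucket the backups will be written to
    kubectl -n kuttl-test-meet-gnat run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- \
      bash -c 'AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'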
logger.go:42: 02:14:50 | one-pod/1-create-cluster | starting test step 1-create-cluster
logger.go:42: 02:14:50 | one-pod/1-create-cluster | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
get_cr \
| yq eval '.spec.mysql.clusterType="async"' - \
| yq eval ".spec.unsafeFlags.mysqlSize=true" - \
| yq eval ".spec.unsafeFlags.proxySize=true" - \
| yq eval ".spec.unsafeFlags.orchestratorSize=true" - \
| yq eval ".spec.mysql.size=1" - \
| yq eval ".spec.proxy.haproxy.enabled=true" - \
| yq eval ".spec.proxy.haproxy.size=1" - \
| yq eval ".spec.orchestrator.enabled=true" - \
| yq eval ".spec.orchestrator.size=1" - \
| yq eval '.spec.backup.storages.minio.type="s3"' - \
| yq eval '.spec.backup.storages.minio.s3.bucket="operator-testing"' - \
| yq eval '.spec.backup.storages.minio.s3.credentialsSecret="minio-secret"' - \
| yq eval ".spec.backup.storages.minio.s3.endpointUrl=\"http://minio-service.${NAMESPACE}:9000\"" - \
| yq eval '.spec.backup.storages.minio.s3.region="us-east-1"' - \
| kubectl -n "${NAMESPACE}" apply -f -]
logger.go:42: 02:14:50 | one-pod/1-create-cluster | + source ../../functions
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ realpath ../../..
logger.go:42: 02:14:50 | one-pod/1-create-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164
logger.go:42: 02:14:50 | one-pod/1-create-cluster | ++++ pwd
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/tests/one-pod
logger.go:42: 02:14:50 | one-pod/1-create-cluster | ++ test_name=one-pod
logger.go:42: 02:14:50 | one-pod/1-create-cluster | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/vars.sh
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/deploy
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/deploy
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/conf
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/conf
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export TEMP_DIR=/tmp/kuttl/ps/one-pod
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ TEMP_DIR=/tmp/kuttl/ps/one-pod
logger.go:42: 02:14:50 | one-pod/1-create-cluster | ++++ git rev-parse --abbrev-ref HEAD
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export GIT_BRANCH=PR-1164
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ GIT_BRANCH=PR-1164
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export VERSION=PR-1164-9bd7f24f
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ VERSION=PR-1164-9bd7f24f
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ [[ -z 8.0 ]]
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export MYSQL_VERSION=8.0
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ MYSQL_VERSION=8.0
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export PMM_SERVER_VERSION=1.4.3
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ PMM_SERVER_VERSION=1.4.3
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export CERT_MANAGER_VER=1.18.2
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ CERT_MANAGER_VER=1.18.2
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export MINIO_VER=5.4.0
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ MINIO_VER=5.4.0
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export CHAOS_MESH_VER=2.7.2
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ CHAOS_MESH_VER=2.7.2
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ export VAULT_VER=0.16.1
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ VAULT_VER=0.16.1
logger.go:42: 02:14:50 | one-pod/1-create-cluster | ++++ which gdate
logger.go:42: 02:14:50 | one-pod/1-create-cluster | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 02:14:50 | one-pod/1-create-cluster | ++++ which date
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ date=/usr/sbin/date
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ oc get projects
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ :
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ kubectl get nodes
logger.go:42: 02:14:50 | one-pod/1-create-cluster | +++ grep '^minikube'
logger.go:42: 02:14:51 | one-pod/1-create-cluster | +++ which gsed
logger.go:42: 02:14:51 | one-pod/1-create-cluster | which: no gsed in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 02:14:51 | one-pod/1-create-cluster | +++ which sed
logger.go:42: 02:14:51 | one-pod/1-create-cluster | ++ sed=/usr/sbin/sed
logger.go:42: 02:14:51 | one-pod/1-create-cluster | ++ oc get projects
logger.go:42: 02:14:51 | one-pod/1-create-cluster | +++ kubectl version -o json
logger.go:42: 02:14:51 | one-pod/1-create-cluster | +++ jq -r .serverVersion.gitVersion
logger.go:42: 02:14:51 | one-pod/1-create-cluster | +++ grep '\-eks\-'
logger.go:42: 02:14:51 | one-pod/1-create-cluster | grep: warning: stray \ before -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
logger.go:42: 02:14:51 | one-pod/1-create-cluster | ++ '[' ']'
logger.go:42: 02:14:51 | one-pod/1-create-cluster | ++ EKS=0
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + get_cr
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + local name_suffix=
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + local image_mysql=perconalab/percona-server-mysql-operator:main-psmysql8.0
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + local image_backup=perconalab/percona-server-mysql-operator:main-backup8.0
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + local image_orchestrator=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + local image_router=perconalab/percona-server-mysql-operator:main-router8.0
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + local image_toolkit=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + local image_haproxy=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.mysql.clusterType="async"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + local image_pmm_client=perconalab/pmm-client:3-dev-latest
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + local cr_file=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/deploy/cr.yaml
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval .spec.unsafeFlags.mysqlSize=true -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval .spec.unsafeFlags.proxySize=true -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval .spec.unsafeFlags.orchestratorSize=true -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval .spec.mysql.size=1 -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | ++ printf '.metadata.name="%s"' one-pod
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval 'del(.spec.secretsName)' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.metadata.name="one-pod"' /mnt/jenkins/workspace/cloud-ps-operator_PR-1164/deploy/cr.yaml
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.upgradeOptions.apply="disabled"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.sslSecretName="test-ssl"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval .spec.proxy.haproxy.enabled=true -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval .spec.proxy.haproxy.size=1 -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.mysql.clusterType="async"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.backup.storages.minio.s3.bucket="operator-testing"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + kubectl -n kuttl-test-meet-gnat apply -f -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | ++ printf '.spec.initContainer.image="%s"' perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval .spec.orchestrator.enabled=true -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.backup.storages.minio.type="s3"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval .spec.orchestrator.size=1 -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval .spec.mysql.gracePeriod=30 -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.backup.storages.minio.s3.credentialsSecret="minio-secret"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.initContainer.image="perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.backup.storages.minio.s3.endpointUrl="http://minio-service.kuttl-test-meet-gnat:9000"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql8.0
logger.go:42: 02:14:51 | one-pod/1-create-cluster | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.backup.storages.minio.s3.region="us-east-1"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + '[' -n '' ']'
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router8.0
logger.go:42: 02:14:51 | one-pod/1-create-cluster | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:3-dev-latest
logger.go:42: 02:14:51 | one-pod/1-create-cluster | ++ printf '.spec.proxy.haproxy.image="%s"' perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval .spec.orchestrator.enabled=true -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup8.0
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.pmm.image="perconalab/pmm-client:3-dev-latest"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup8.0"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql8.0"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router8.0"' -
logger.go:42: 02:14:51 | one-pod/1-create-cluster | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' -
logger.go:42: 02:14:53 | one-pod/1-create-cluster | perconaservermysql.ps.percona.com/one-pod created
logger.go:42: 02:16:27 | one-pod/1-create-cluster | test step completed 1-create-cluster
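The one-pod cluster created in this step is simply deploy/cr.yaml reduced to a single replica of each component and pointed at the MinIO storage set up in step 0. A condensed sketch of the yq pipeline from the step definition above (get_cr comes from e2e-tests/functions; the unsafeFlags overrides appear to be what allow the single-replica sizes):

    get_cr \
      | yq eval '.spec.mysql.clusterType="async"' - \
      | yq eval '.spec.unsafeFlags.mysqlSize=true, .spec.unsafeFlags.proxySize=true, .spec.unsafeFlags.orchestratorSize=true' - \
      | yq eval '.spec.mysql.size=1' - \
      | yq eval '.spec.proxy.haproxy.enabled=true, .spec.proxy.haproxy.size=1' - \
      | yq eval '.spec.orchestrator.enabled=true, .spec.orchestrator.size=1' - \
      | yq eval '.spec.backup.storages.minio.type="s3"' - \
      | yq eval '.spec.backup.storages.minio.s3.bucket="operator-testing"' - \
      | yq eval '.spec.backup.storages.minio.s3.credentialsSecret="minio-secret"' - \
      | yq eval '.spec.backup.storages.minio.s3.endpointUrl="http://minio-service.kuttl-test-meet-gnat:9000"' - \
      | yq eval '.spec.backup.storages.minio.s3.region="us-east-1"' - \
      | kubectl -n kuttl-test-meet-gnat apply -f -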
logger.go:42: 02:16:27 | one-pod/2-write-data | starting test step 2-write-data
logger.go:42: 02:16:27 | one-pod/2-write-data | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
sleep 5
run_mysql \
"CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" \
"-h $(get_haproxy_svc $(get_cluster_name))"
run_mysql \
"INSERT myDB.myTable (id) VALUES (100500)" \
"-h $(get_haproxy_svc $(get_cluster_name))"]
logger.go:42: 02:16:27 | one-pod/2-write-data | + source ../../functions
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ realpath ../../..
logger.go:42: 02:16:27 | one-pod/2-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164
logger.go:42: 02:16:27 | one-pod/2-write-data | ++++ pwd
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/tests/one-pod
logger.go:42: 02:16:27 | one-pod/2-write-data | ++ test_name=one-pod
logger.go:42: 02:16:27 | one-pod/2-write-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/vars.sh
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/deploy
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/deploy
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/conf
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/e2e-tests/conf
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export TEMP_DIR=/tmp/kuttl/ps/one-pod
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ TEMP_DIR=/tmp/kuttl/ps/one-pod
logger.go:42: 02:16:27 | one-pod/2-write-data | ++++ git rev-parse --abbrev-ref HEAD
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export GIT_BRANCH=PR-1164
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ GIT_BRANCH=PR-1164
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export VERSION=PR-1164-9bd7f24f
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ VERSION=PR-1164-9bd7f24f
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ [[ -z 8.0 ]]
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export MYSQL_VERSION=8.0
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ MYSQL_VERSION=8.0
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql8.0
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup8.0
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router8.0
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export PMM_SERVER_VERSION=1.4.3
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ PMM_SERVER_VERSION=1.4.3
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:3-dev-latest
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:3-dev-latest
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export CERT_MANAGER_VER=1.18.2
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ CERT_MANAGER_VER=1.18.2
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export MINIO_VER=5.4.0
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ MINIO_VER=5.4.0
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export CHAOS_MESH_VER=2.7.2
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ CHAOS_MESH_VER=2.7.2
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ export VAULT_VER=0.16.1
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ VAULT_VER=0.16.1
logger.go:42: 02:16:27 | one-pod/2-write-data | ++++ which gdate
logger.go:42: 02:16:27 | one-pod/2-write-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 02:16:27 | one-pod/2-write-data | ++++ which date
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ date=/usr/sbin/date
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ oc get projects
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ :
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ kubectl get nodes
logger.go:42: 02:16:27 | one-pod/2-write-data | +++ grep '^minikube'
logger.go:42: 02:16:28 | one-pod/2-write-data | +++ which gsed
logger.go:42: 02:16:28 | one-pod/2-write-data | which: no gsed in (/mnt/jenkins/workspace/cloud-ps-operator_PR-1164/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 02:16:28 | one-pod/2-write-data | +++ which sed
logger.go:42: 02:16:28 | one-pod/2-write-data | ++ sed=/usr/sbin/sed
logger.go:42: 02:16:28 | one-pod/2-write-data | ++ oc get projects
logger.go:42: 02:16:28 | one-pod/2-write-data | +++ kubectl version -o json
logger.go:42: 02:16:28 | one-pod/2-write-data | +++ jq -r .serverVersion.gitVersion
logger.go:42: 02:16:28 | one-pod/2-write-data | +++ grep '\-eks\-'
logger.go:42: 02:16:28 | one-pod/2-write-data | grep: warning: stray \ before -
logger.go:42: 02:16:28 | one-pod/2-write-data | Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
logger.go:42: 02:16:28 | one-pod/2-write-data | ++ '[' ']'
logger.go:42: 02:16:28 | one-pod/2-write-data | ++ EKS=0
logger.go:42: 02:16:28 | one-pod/2-write-data | + sleep 5
logger.go:42: 02:16:33 | one-pod/2-write-data | +++ get_cluster_name
logger.go:42: 02:16:33 | one-pod/2-write-data | +++ kubectl -n kuttl-test-meet-gnat get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 02:16:34 | one-pod/2-write-data | ++ get_haproxy_svc one-pod
logger.go:42: 02:16:34 | one-pod/2-write-data | ++ local cluster=one-pod
logger.go:42: 02:16:34 | one-pod/2-write-data | ++ echo one-pod-haproxy
logger.go:42: 02:16:34 | one-pod/2-write-data | + run_mysql 'CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)' '-h one-pod-haproxy'
logger.go:42: 02:16:34 | one-pod/2-write-data | + local 'command=CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)'
logger.go:42: 02:16:34 | one-pod/2-write-data | + local 'host=-h one-pod-haproxy'
logger.go:42: 02:16:34 | one-pod/2-write-data | ++ get_user_pass root
logger.go:42: 02:16:34 | one-pod/2-write-data | ++ local user=root
logger.go:42: 02:16:34 | one-pod/2-write-data | +++ get_cluster_name
logger.go:42: 02:16:34 | one-pod/2-write-data | +++ kubectl -n kuttl-test-meet-gnat get ps -o 'jsonpath={.items[0].metadata.name}'
logger.go:42: 02:16:34 | one-pod/2-write-data | ++ local secret=one-pod-secrets
logger.go:42: 02:16:34 | one-pod/2-write-data | ++ kubectl -n kuttl-test-meet-gnat get secret one-pod-secrets -o 'jsonpath={.data.root}'
logger.go:42: 02:16:34 | one-pod/2-write-data | ++ base64 --decode
logger.go:42: 02:16:35 | one-pod/2-write-data | + local 'user=-uroot -p'\''g
Initializing ps-controller
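The capture of the write step is cut off above, but the step command and the traced pieces (root password read from the one-pod-secrets secret, host one-pod-haproxy) suggest a rough stand-alone equivalent along these lines. Treat it as a sketch only: the exec into the mysql-client pod deployed in step 0 is an assumption about how run_mysql works, not something shown in this log.

    # sketch: write test data through the HAProxy service, assuming the mysql-client pod from step 0
    ROOT_PASS=$(kubectl -n kuttl-test-meet-gnat get secret one-pod-secrets -o 'jsonpath={.data.root}' | base64 --decode)
    kubectl -n kuttl-test-meet-gnat exec mysql-client -- \
      mysql -h one-pod-haproxy -uroot -p"$ROOT_PASS" \
      -e "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)"
    kubectl -n kuttl-test-meet-gnat exec mysql-client -- \
      mysql -h one-pod-haproxy -uroot -p"$ROOT_PASS" \
      -e "INSERT myDB.myTable (id) VALUES (100500)"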
2025-11-12 02:14:58 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:14:58 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 107ms (107ms including waiting). Image size: 77414761 bytes. kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:14:58 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{orchestrator} Created Created container: orchestrator kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:14:58 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{orchestrator} Started Started container orchestrator kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:14:58 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:14:58 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 100ms (100ms including waiting). Image size: 77414761 bytes. kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:14:58 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:14:58 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:02 +0000 UTC Normal Pod one-pod-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d4e56869-0cf5-40fd-8c4e-e8bfa4266230" attachdetach-controller logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:04 +0000 UTC Normal Pod one-pod-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f" kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:04 +0000 UTC Normal Pod one-pod-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f" in 103ms (103ms including waiting). Image size: 113063209 bytes. kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:04 +0000 UTC Normal Pod one-pod-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:04 +0000 UTC Normal Pod one-pod-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:05 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:20 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 14.498s (14.498s including waiting). Image size: 417810065 bytes. 
kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:20 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:20 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:20 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:39 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 18.798s (18.798s including waiting). Image size: 545362941 bytes. kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:39 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:39 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:39 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:39 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 100ms (100ms including waiting). Image size: 139956959 bytes. kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:39 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:39 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:15:59 +0000 UTC Normal PodDisruptionBudget.policy one-pod-haproxy NoPods No matching pods found controllermanager logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:00 +0000 UTC Normal Pod one-pod-haproxy-0 Binding Scheduled Successfully assigned kuttl-test-meet-gnat/one-pod-haproxy-0 to gke-jen-ps-1164-9bd7f24f-default-pool-63271370-lft8 default-scheduler logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:00 +0000 UTC Normal Pod one-pod-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f" kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:00 +0000 UTC Normal Pod one-pod-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f" in 126ms (126ms including waiting). Image size: 113063209 bytes. 
kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:00 +0000 UTC Normal Pod one-pod-haproxy-0.spec.initContainers{haproxy-init} Created Created container: haproxy-init kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:00 +0000 UTC Normal Pod one-pod-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:00 +0000 UTC Normal StatefulSet.apps one-pod-haproxy SuccessfulCreate create Pod one-pod-haproxy-0 in StatefulSet one-pod-haproxy successful statefulset-controller logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:02 +0000 UTC Normal Pod one-pod-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:03 +0000 UTC Normal Pod one-pod-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 128ms (128ms including waiting). Image size: 111182257 bytes. kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:03 +0000 UTC Normal Pod one-pod-haproxy-0.spec.containers{haproxy} Created Created container: haproxy kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:03 +0000 UTC Normal Pod one-pod-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:03 +0000 UTC Normal Pod one-pod-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:03 +0000 UTC Normal Pod one-pod-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 85ms (85ms including waiting). Image size: 111182257 bytes. kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:03 +0000 UTC Normal Pod one-pod-haproxy-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:03 +0000 UTC Normal Pod one-pod-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:26 +0000 UTC Warning PerconaServerMySQL.ps.percona.com one-pod ClusterStateChanged Initializing -> Ready ps-controller logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:41 +0000 UTC Normal Pod xb-one-pod-minio-minio-kw787 Binding Scheduled Successfully assigned kuttl-test-meet-gnat/xb-one-pod-minio-minio-kw787 to gke-jen-ps-1164-9bd7f24f-default-pool-63271370-8gl9 default-scheduler logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:41 +0000 UTC Normal Pod xb-one-pod-minio-minio-kw787.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f" kubelet logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:41 +0000 UTC Normal Job.batch xb-one-pod-minio-minio SuccessfulCreate Created pod: xb-one-pod-minio-minio-kw787 job-controller logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:42 +0000 UTC Normal Pod xb-one-pod-minio-minio-kw787.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f" in 130ms (130ms including waiting). Image size: 113063209 bytes. 
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:42 +0000 UTC Normal Pod xb-one-pod-minio-minio-kw787.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:42 +0000 UTC Normal Pod xb-one-pod-minio-minio-kw787.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:44 +0000 UTC Normal Pod xb-one-pod-minio-minio-kw787.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:59 +0000 UTC Normal Pod xb-one-pod-minio-minio-kw787.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 15.721s (15.721s including waiting). Image size: 545362941 bytes. kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:59 +0000 UTC Normal Pod xb-one-pod-minio-minio-kw787.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:16:59 +0000 UTC Normal Pod xb-one-pod-minio-minio-kw787.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:17:16 +0000 UTC Normal Job.batch xb-one-pod-minio-minio Completed Job completed job-controller
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:17:31 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:17:31 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:17:31 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:17:31 +0000 UTC Normal StatefulSet.apps one-pod-mysql SuccessfulDelete delete Pod one-pod-mysql-0 in StatefulSet one-pod-mysql successful statefulset-controller
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:17:32 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:17:32 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:17:32 +0000 UTC Normal StatefulSet.apps one-pod-orc SuccessfulDelete delete Pod one-pod-orc-0 in StatefulSet one-pod-orc successful statefulset-controller
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:17:32 +0000 UTC Warning PerconaServerMySQL.ps.percona.com one-pod ClusterStateChanged Ready -> Paused ps-controller
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:17:34 +0000 UTC Warning Pod one-pod-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/11/12 02:17:34 MySQL state is not ready... kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:17:37 +0000 UTC Warning Pod one-pod-haproxy-0.spec.containers{haproxy} Unhealthy Readiness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:17:38 +0000 UTC Warning Pod one-pod-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:17:57 +0000 UTC Normal Pod xb-restore-one-pod-restore-minio-l5lxv Binding Scheduled Successfully assigned kuttl-test-meet-gnat/xb-restore-one-pod-restore-minio-l5lxv to gke-jen-ps-1164-9bd7f24f-default-pool-63271370-8gl9 default-scheduler
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:17:57 +0000 UTC Normal Job.batch xb-restore-one-pod-restore-minio SuccessfulCreate Created pod: xb-restore-one-pod-restore-minio-l5lxv job-controller
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:00 +0000 UTC Warning Pod one-pod-haproxy-0.spec.containers{haproxy} Unhealthy Liveness probe failed: ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 2 kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:11 +0000 UTC Normal Pod xb-restore-one-pod-restore-minio-l5lxv SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d4e56869-0cf5-40fd-8c4e-e8bfa4266230" attachdetach-controller
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:12 +0000 UTC Normal Pod xb-restore-one-pod-restore-minio-l5lxv.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f" kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:12 +0000 UTC Normal Pod xb-restore-one-pod-restore-minio-l5lxv.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f" in 140ms (140ms including waiting). Image size: 113063209 bytes. kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:13 +0000 UTC Normal Pod xb-restore-one-pod-restore-minio-l5lxv.spec.initContainers{xtrabackup-init} Created Created container: xtrabackup-init kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:13 +0000 UTC Normal Pod xb-restore-one-pod-restore-minio-l5lxv.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:14 +0000 UTC Normal Pod xb-restore-one-pod-restore-minio-l5lxv.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:14 +0000 UTC Normal Pod xb-restore-one-pod-restore-minio-l5lxv.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 85ms (85ms including waiting). Image size: 545362941 bytes. kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:14 +0000 UTC Normal Pod xb-restore-one-pod-restore-minio-l5lxv.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:14 +0000 UTC Normal Pod xb-restore-one-pod-restore-minio-l5lxv.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:24 +0000 UTC Normal Job.batch xb-restore-one-pod-restore-minio Completed Job completed job-controller
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:25 +0000 UTC Normal Pod one-pod-mysql-0 Binding Scheduled Successfully assigned kuttl-test-meet-gnat/one-pod-mysql-0 to gke-jen-ps-1164-9bd7f24f-default-pool-63271370-8m05 default-scheduler
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:25 +0000 UTC Warning Pod one-pod-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-d4e56869-0cf5-40fd-8c4e-e8bfa4266230" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:25 +0000 UTC Normal Pod one-pod-orc-0 Binding Scheduled Successfully assigned kuttl-test-meet-gnat/one-pod-orc-0 to gke-jen-ps-1164-9bd7f24f-default-pool-63271370-8gl9 default-scheduler
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:25 +0000 UTC Normal Pod one-pod-orc-0.spec.initContainers{orchestrator-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f" kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:25 +0000 UTC Normal Pod one-pod-orc-0.spec.initContainers{orchestrator-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f" in 132ms (132ms including waiting). Image size: 113063209 bytes. kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:25 +0000 UTC Normal Pod one-pod-orc-0.spec.initContainers{orchestrator-init} Created Created container: orchestrator-init kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:25 +0000 UTC Normal Pod one-pod-orc-0.spec.initContainers{orchestrator-init} Started Started container orchestrator-init kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:25 +0000 UTC Warning PerconaServerMySQL.ps.percona.com one-pod ClusterStateChanged Paused -> Initializing ps-controller
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:27 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{orchestrator} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:27 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{orchestrator} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 86ms (86ms including waiting). Image size: 77414761 bytes. kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:27 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{orchestrator} Created Created container: orchestrator kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:27 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{orchestrator} Started Started container orchestrator kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:27 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:27 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 85ms (85ms including waiting). Image size: 77414761 bytes. kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:27 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{mysql-monit} Created Created container: mysql-monit kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:27 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:42 +0000 UTC Normal Pod one-pod-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d4e56869-0cf5-40fd-8c4e-e8bfa4266230" attachdetach-controller
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:44 +0000 UTC Normal Pod one-pod-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f" kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:44 +0000 UTC Normal Pod one-pod-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-1164-9bd7f24f" in 155ms (155ms including waiting). Image size: 113063209 bytes. kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:44 +0000 UTC Normal Pod one-pod-mysql-0.spec.initContainers{mysql-init} Created Created container: mysql-init kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:44 +0000 UTC Normal Pod one-pod-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:46 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql8.0" kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:46 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql8.0" in 130ms (130ms including waiting). Image size: 417810065 bytes. kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:46 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{mysql} Created Created container: mysql kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:46 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{mysql} Started Started container mysql kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:46 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup8.0" kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:46 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup8.0" in 110ms (110ms including waiting). Image size: 545362941 bytes. kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:46 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{xtrabackup} Created Created container: xtrabackup kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:46 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:46 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:46 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 110ms (110ms including waiting). Image size: 139956959 bytes. kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:46 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{pt-heartbeat} Created Created container: pt-heartbeat kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:18:46 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:19:20 +0000 UTC Warning PerconaServerMySQL.ps.percona.com one-pod AsyncReplicationNotReady orchestrator: unable to determine cluster name ps-controller
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:19:26 +0000 UTC Warning PerconaServerMySQL.ps.percona.com one-pod AsyncReplicationNotReady one-pod-mysql-0: [replication_lag] ps-controller
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:19:46 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:19:46 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:19:46 +0000 UTC Normal Pod one-pod-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:19:46 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{orchestrator} Killing Stopping container orchestrator kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:19:46 +0000 UTC Normal Pod one-pod-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:19:49 +0000 UTC Warning Pod one-pod-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2025/11/12 02:19:49 MySQL state is not ready... kubelet
logger.go:42: 02:19:58 | one-pod | 2025-11-12 02:19:54 +0000 UTC Warning Pod one-pod-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet
logger.go:42: 02:19:58 | one-pod | Deleting namespace: kuttl-test-meet-gnat
=== NAME kuttl
    harness.go:403: run tests finished
    harness.go:510: cleaning up
    harness.go:567: removing temp folder: ""
--- PASS: kuttl (390.61s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/one-pod (389.88s)
PASS