=== RUN kuttl harness.go:464: starting setup harness.go:255: running tests using configured kubeconfig. harness.go:278: Successful connection to cluster at: https://34.56.247.139 harness.go:363: running tests harness.go:75: going to run test suite with timeout of 180 seconds for each step harness.go:375: testsuite: e2e-tests/tests has 34 tests === RUN kuttl/harness === RUN kuttl/harness/demand-backup === PAUSE kuttl/harness/demand-backup === CONT kuttl/harness/demand-backup logger.go:42: 12:43:21 | demand-backup | Creating namespace: kuttl-test-gentle-eft logger.go:42: 12:43:21 | demand-backup/0-minio-secret | starting test step 0-minio-secret logger.go:42: 12:43:22 | demand-backup/0-minio-secret | Secret:kuttl-test-gentle-eft/minio-secret created logger.go:42: 12:43:22 | demand-backup/0-minio-secret | test step completed 0-minio-secret logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | starting test step 1-deploy-operator logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | running command: [sh -c set -o errexit set -o xtrace source ../../functions init_temp_dir # do this only in the first TestStep apply_s3_storage_secrets deploy_operator deploy_non_tls_cluster_secrets deploy_tls_cluster_secrets deploy_client deploy_minio] logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | + source ../../functions logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ realpath ../../.. logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | ++++ pwd logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | ++ test_name=demand-backup logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export GIT_BRANCH=PR-767 logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ GIT_BRANCH=PR-767 logger.go:42: 12:43:22 | 
demand-backup/1-deploy-operator | +++ export VERSION=PR-767-8e07c66d logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ VERSION=PR-767-8e07c66d logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | ++++ which gdate logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | ++++ which date logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ date=/usr/bin/date logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ command -v oc logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ kubectl get nodes logger.go:42: 12:43:22 | demand-backup/1-deploy-operator | +++ grep '^minikube' logger.go:42: 12:43:23 | 
demand-backup/1-deploy-operator | + init_temp_dir logger.go:42: 12:43:23 | demand-backup/1-deploy-operator | + rm -rf /tmp/kuttl/ps/demand-backup logger.go:42: 12:43:23 | demand-backup/1-deploy-operator | + mkdir -p /tmp/kuttl/ps/demand-backup logger.go:42: 12:43:23 | demand-backup/1-deploy-operator | + apply_s3_storage_secrets logger.go:42: 12:43:23 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-gentle-eft apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf/minio-secret.yml logger.go:42: 12:43:24 | demand-backup/1-deploy-operator | Warning: resource secrets/minio-secret is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 12:43:24 | demand-backup/1-deploy-operator | secret/minio-secret configured logger.go:42: 12:43:24 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-gentle-eft apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf/cloud-secret.yml logger.go:42: 12:43:25 | demand-backup/1-deploy-operator | secret/aws-s3-secret created logger.go:42: 12:43:26 | demand-backup/1-deploy-operator | secret/gcp-cs-secret created logger.go:42: 12:43:26 | demand-backup/1-deploy-operator | secret/azure-secret created logger.go:42: 12:43:26 | demand-backup/1-deploy-operator | + deploy_operator logger.go:42: 12:43:26 | demand-backup/1-deploy-operator | + destroy_operator logger.go:42: 12:43:26 | demand-backup/1-deploy-operator | + kubectl -n ps-operator delete deployment percona-server-mysql-operator --force --grace-period=0 logger.go:42: 12:43:26 | demand-backup/1-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 12:43:26 | demand-backup/1-deploy-operator | Error from server (NotFound): deployments.apps "percona-server-mysql-operator" not found logger.go:42: 12:43:26 | demand-backup/1-deploy-operator | + true logger.go:42: 12:43:26 | demand-backup/1-deploy-operator | + [[ -n ps-operator ]] logger.go:42: 12:43:26 | demand-backup/1-deploy-operator | + kubectl delete namespace ps-operator --force --grace-period=0 logger.go:42: 12:43:26 | demand-backup/1-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. 
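The destroy/create sequence above is the operator namespace reset the test performs before every run: force-delete any leftover deployment and namespace, tolerate NotFound errors, then recreate the namespace once the old one is fully gone. A minimal standalone sketch of that pattern, assuming only kubectl is available (the real create_namespace helper lives in e2e-tests/functions and may differ in details such as timeouts):

    # Recreate a namespace idempotently, as the create_namespace helper does above.
    # Assumption: a 2-minute wait is enough for namespace finalizers to finish;
    # the logged helper calls kubectl wait without an explicit timeout.
    recreate_namespace() {
        local namespace="$1"
        kubectl delete namespace "${namespace}" --ignore-not-found
        kubectl wait --for=delete namespace "${namespace}" --timeout=120s || true
        kubectl create namespace "${namespace}"
    }

    recreate_namespace ps-operator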
logger.go:42: 12:43:27 | demand-backup/1-deploy-operator | Error from server (NotFound): namespaces "ps-operator" not found logger.go:42: 12:43:27 | demand-backup/1-deploy-operator | + true logger.go:42: 12:43:27 | demand-backup/1-deploy-operator | + [[ -n ps-operator ]] logger.go:42: 12:43:27 | demand-backup/1-deploy-operator | + create_namespace ps-operator logger.go:42: 12:43:27 | demand-backup/1-deploy-operator | + local namespace=ps-operator logger.go:42: 12:43:27 | demand-backup/1-deploy-operator | + [[ -n '' ]] logger.go:42: 12:43:27 | demand-backup/1-deploy-operator | + kubectl delete namespace ps-operator --ignore-not-found logger.go:42: 12:43:27 | demand-backup/1-deploy-operator | + kubectl wait --for=delete namespace ps-operator logger.go:42: 12:43:28 | demand-backup/1-deploy-operator | + kubectl create namespace ps-operator logger.go:42: 12:43:28 | demand-backup/1-deploy-operator | namespace/ps-operator created logger.go:42: 12:43:28 | demand-backup/1-deploy-operator | + kubectl -n ps-operator apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy/crd.yaml logger.go:42: 12:43:29 | demand-backup/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlbackups.ps.percona.com serverside-applied logger.go:42: 12:43:29 | demand-backup/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlrestores.ps.percona.com serverside-applied logger.go:42: 12:43:30 | demand-backup/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqls.ps.percona.com serverside-applied logger.go:42: 12:43:30 | demand-backup/1-deploy-operator | + '[' -n ps-operator ']' logger.go:42: 12:43:30 | demand-backup/1-deploy-operator | + kubectl -n ps-operator apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy/cw-rbac.yaml logger.go:42: 12:43:31 | demand-backup/1-deploy-operator | serviceaccount/percona-server-mysql-operator created logger.go:42: 12:43:32 | demand-backup/1-deploy-operator | role.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created logger.go:42: 12:43:32 | demand-backup/1-deploy-operator | clusterrole.rbac.authorization.k8s.io/percona-server-mysql-operator created logger.go:42: 12:43:32 | demand-backup/1-deploy-operator | rolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created logger.go:42: 12:43:32 | demand-backup/1-deploy-operator | clusterrolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator created logger.go:42: 12:43:32 | demand-backup/1-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="LOG_LEVEL").value) = "DEBUG"' logger.go:42: 12:43:32 | demand-backup/1-deploy-operator | + kubectl -n ps-operator apply -f - logger.go:42: 12:43:32 | demand-backup/1-deploy-operator | ++ printf 'select(documentIndex==1).spec.template.spec.containers[0].image="%s"' perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:43:32 | demand-backup/1-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="DISABLE_TELEMETRY").value) = "true"' logger.go:42: 12:43:32 | demand-backup/1-deploy-operator | + yq eval 'select(documentIndex==1).spec.template.spec.containers[0].image="perconalab/percona-server-mysql-operator:PR-767-8e07c66d"' /mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy/cw-operator.yaml logger.go:42: 12:43:34 | 
demand-backup/1-deploy-operator | configmap/percona-server-mysql-operator-config created logger.go:42: 12:43:34 | demand-backup/1-deploy-operator | deployment.apps/percona-server-mysql-operator created logger.go:42: 12:43:34 | demand-backup/1-deploy-operator | + deploy_non_tls_cluster_secrets logger.go:42: 12:43:34 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-gentle-eft apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf/secrets.yaml logger.go:42: 12:43:35 | demand-backup/1-deploy-operator | secret/test-secrets created logger.go:42: 12:43:35 | demand-backup/1-deploy-operator | + deploy_tls_cluster_secrets logger.go:42: 12:43:35 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-gentle-eft apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf/ssl-secret.yaml logger.go:42: 12:43:36 | demand-backup/1-deploy-operator | secret/test-ssl created logger.go:42: 12:43:36 | demand-backup/1-deploy-operator | + deploy_client logger.go:42: 12:43:36 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-gentle-eft apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf/client.yaml logger.go:42: 12:43:37 | demand-backup/1-deploy-operator | pod/mysql-client created logger.go:42: 12:43:37 | demand-backup/1-deploy-operator | + deploy_minio logger.go:42: 12:43:37 | demand-backup/1-deploy-operator | + local access_key logger.go:42: 12:43:37 | demand-backup/1-deploy-operator | + local secret_key logger.go:42: 12:43:37 | demand-backup/1-deploy-operator | ++ kubectl -n kuttl-test-gentle-eft get secret minio-secret -o 'jsonpath={.data.AWS_ACCESS_KEY_ID}' logger.go:42: 12:43:37 | demand-backup/1-deploy-operator | ++ base64 -d logger.go:42: 12:43:37 | demand-backup/1-deploy-operator | + access_key=some-access-key logger.go:42: 12:43:37 | demand-backup/1-deploy-operator | ++ kubectl -n kuttl-test-gentle-eft get secret minio-secret -o 'jsonpath={.data.AWS_SECRET_ACCESS_KEY}' logger.go:42: 12:43:37 | demand-backup/1-deploy-operator | ++ base64 -d logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | + secret_key=some-secret-key logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | + helm uninstall -n kuttl-test-gentle-eft minio-service logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-767/kubeconfig logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-767/kubeconfig logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | Error: uninstall: Release not loaded: minio-service: release: not found logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | + : logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | + helm repo remove minio logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-767/kubeconfig logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. 
Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-767/kubeconfig logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | "minio" has been removed from your repositories logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | + helm repo add minio https://charts.min.io/ logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-767/kubeconfig logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-767/kubeconfig logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | "minio" has been added to your repositories logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | +++ printf %q some-access-key logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | ++ printf %q some-access-key logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | +++ printf %q some-secret-key logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | ++ printf %q some-secret-key logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | + retry 10 60 helm install minio-service -n kuttl-test-gentle-eft --version 5.0.14 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | + local max=10 logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | + local delay=60 logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | + shift 2 logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | + local n=1 logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | + helm install minio-service -n kuttl-test-gentle-eft --version 5.0.14 --set replicas=1 --set mode=standalone --set resources.requests.memory=256Mi --set rootUser=rootuser --set rootPassword=rootpass123 --set 'users[0].accessKey=some-access-key' --set 'users[0].secretKey=some-secret-key' --set 'users[0].policy=consoleAdmin' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set securityContext.enabled=false minio/minio logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-767/kubeconfig logger.go:42: 12:43:38 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. 
Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-767/kubeconfig logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | NAME: minio-service logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | LAST DEPLOYED: Tue Nov 5 12:43:39 2024 logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | NAMESPACE: kuttl-test-gentle-eft logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | STATUS: deployed logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | REVISION: 1 logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | TEST SUITE: None logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | NOTES: logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | MinIO can be accessed via port 9000 on the following DNS name from within your cluster: logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | minio-service.kuttl-test-gentle-eft.svc.cluster.local logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | To access MinIO from localhost, run the below commands: logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | 1. export POD_NAME=$(kubectl get pods --namespace kuttl-test-gentle-eft -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}") logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | 2. kubectl port-forward $POD_NAME 9000 --namespace kuttl-test-gentle-eft logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/ logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | You can now access MinIO server on http://localhost:9000. Follow the below steps to connect to MinIO server with mc client: logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | 1. Download the MinIO mc client - https://min.io/docs/minio/linux/reference/minio-mc.html#quickstart logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | 2. export MC_HOST_minio-service-local=http://$(kubectl get secret --namespace kuttl-test-gentle-eft minio-service -o jsonpath="{.data.rootUser}" | base64 --decode):$(kubectl get secret --namespace kuttl-test-gentle-eft minio-service -o jsonpath="{.data.rootPassword}" | base64 --decode)@localhost:9000 logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | 3. 
mc ls minio-service-local logger.go:42: 12:44:20 | demand-backup/1-deploy-operator | ++ kubectl -n kuttl-test-gentle-eft get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:44:21 | demand-backup/1-deploy-operator | + MINIO_POD=minio-service-847fc8bb8d-xn5dz logger.go:42: 12:44:21 | demand-backup/1-deploy-operator | + wait_pod minio-service-847fc8bb8d-xn5dz logger.go:42: 12:44:21 | demand-backup/1-deploy-operator | + local pod=minio-service-847fc8bb8d-xn5dz logger.go:42: 12:44:21 | demand-backup/1-deploy-operator | + set +o xtrace logger.go:42: 12:44:21 | demand-backup/1-deploy-operator | minio-service-847fc8bb8d-xn5dztrue logger.go:42: 12:44:21 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-gentle-eft run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID='\''some-access-key'\'' AWS_SECRET_ACCESS_KEY='\''some-secret-key'\'' AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing' logger.go:42: 12:44:26 | demand-backup/1-deploy-operator | If you don't see a command prompt, try pressing enter. logger.go:42: 12:44:27 | demand-backup/1-deploy-operator | warning: couldn't attach to pod/aws-cli, falling back to streaming logs: Internal error occurred: error attaching to container: container is in CONTAINER_EXITED state logger.go:42: 12:44:27 | demand-backup/1-deploy-operator | make_bucket: operator-testing logger.go:42: 12:44:30 | demand-backup/1-deploy-operator | pod "aws-cli" deleted [controller-runtime] log.SetLogger(...) was never called; logs will not be displayed. Detected at: > goroutine 36 [running]: > runtime/debug.Stack() > /nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/runtime/debug/stack.go:24 +0x5e > sigs.k8s.io/controller-runtime/pkg/log.eventuallyFulfillRoot() > /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/log/log.go:60 +0xcd > sigs.k8s.io/controller-runtime/pkg/log.(*delegatingLogSink).WithName(0xc0002a9c00, {0x184a055, 0x14}) > /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/log/deleg.go:147 +0x3e > github.com/go-logr/logr.Logger.WithName({{0x1acb7d8, 0xc0002a9c00}, 0x0}, {0x184a055?, 0xc000787f80?}) > /home/mowsiany/go/pkg/mod/github.com/go-logr/logr@v1.2.4/logr.go:336 +0x36 > sigs.k8s.io/controller-runtime/pkg/client.newClient(0x131ead3?, {0x0, 0xc00043ca10, {0x1accd90, 0xc0003a0ec0}, 0x0, {0x0, 0x0}, 0x0}) > /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/client/client.go:122 +0xf1 > sigs.k8s.io/controller-runtime/pkg/client.New(0xc0003bc908?, {0x0, 0xc00043ca10, {0x1accd90, 0xc0003a0ec0}, 0x0, {0x0, 0x0}, 0x0}) > /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/client/client.go:103 +0x7d > github.com/kudobuilder/kuttl/pkg/test/utils.NewRetryClient(0xc0003bc908, {0x0, 0xc00043ca10, {0x1accd90, 0xc0003a0ec0}, 0x0, {0x0, 0x0}, 0x0}) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/utils/kubernetes.go:177 +0x127 > github.com/kudobuilder/kuttl/pkg/test.(*Harness).Client(0xc000397208, 0xe6?) 
> /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/harness.go:323 +0x18e > github.com/kudobuilder/kuttl/pkg/test.(*Step).Create(0xc000658dd0, 0xc0005d0d00, {0xc000049f38, 0x15}) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/step.go:177 +0x63 > github.com/kudobuilder/kuttl/pkg/test.(*Step).Run(0xc000658dd0, 0xc0005d0d00, {0xc000049f38, 0x15}) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/step.go:457 +0x24a > github.com/kudobuilder/kuttl/pkg/test.(*Case).Run(0xc00024ea00, 0xc0005d0d00, 0xc0003da750) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/case.go:373 +0xaeb > github.com/kudobuilder/kuttl/pkg/test.(*Harness).RunTests.func1.1(0xc0005d0d00) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/harness.go:401 +0x12e > testing.tRunner(0xc0005d0d00, 0xc000622030) > /nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/testing/testing.go:1689 +0xfb > created by testing.(*T).Run in goroutine 35 > /nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/testing/testing.go:1742 +0x390 logger.go:42: 12:44:30 | demand-backup/1-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-server-mysql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 12:44:30 | demand-backup/1-deploy-operator | ASSERT deployment percona-server-mysql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 12:44:31 | demand-backup/1-deploy-operator | INFO Found 1 resource(s). logger.go:42: 12:44:31 | demand-backup/1-deploy-operator | NAME NAMESPACE COL0 logger.go:42: 12:44:31 | demand-backup/1-deploy-operator | percona-server-mysql-operator ps-operator 1 logger.go:42: 12:44:31 | demand-backup/1-deploy-operator | ASSERT PASS logger.go:42: 12:44:31 | demand-backup/1-deploy-operator | test step completed 1-deploy-operator logger.go:42: 12:44:31 | demand-backup/2-create-cluster | starting test step 2-create-cluster logger.go:42: 12:44:31 | demand-backup/2-create-cluster | running command: [sh -c set -o errexit set -o xtrace source ../../functions get_cr \ | yq eval ".spec.mysql.clusterType=\"async\"" - \ | yq eval ".spec.mysql.size=3" - \ | yq eval ".spec.proxy.haproxy.enabled=true" - \ | yq eval ".spec.proxy.haproxy.size=3" - \ | yq eval ".spec.orchestrator.enabled=true" - \ | yq eval ".spec.orchestrator.size=3" - \ | yq eval ".spec.backup.storages.minio.type=\"s3\"" - \ | yq eval ".spec.backup.storages.minio.s3.bucket=\"operator-testing\"" - \ | yq eval ".spec.backup.storages.minio.s3.credentialsSecret=\"minio-secret\"" - \ | yq eval ".spec.backup.storages.minio.s3.endpointUrl=\"http://minio-service.${NAMESPACE}:9000\"" - \ | yq eval ".spec.backup.storages.minio.s3.region=\"us-east-1\"" - \ | yq eval ".spec.backup.storages.aws-s3.type=\"s3\"" - \ | yq eval ".spec.backup.storages.aws-s3.verifyTLS=true" - \ | yq eval ".spec.backup.storages.aws-s3.s3.bucket=\"operator-testing\"" - \ | yq eval ".spec.backup.storages.aws-s3.s3.credentialsSecret=\"aws-s3-secret\"" - \ | yq eval ".spec.backup.storages.aws-s3.s3.region=\"us-east-1\"" - \ | yq eval ".spec.backup.storages.aws-s3.s3.prefix=\"ps\"" - \ | yq eval ".spec.backup.storages.gcp-cs.type=\"gcs\"" - \ | yq eval ".spec.backup.storages.gcp-cs.verifyTLS=true" - \ | yq eval ".spec.backup.storages.gcp-cs.gcs.bucket=\"operator-testing\"" - \ | yq eval ".spec.backup.storages.gcp-cs.gcs.credentialsSecret=\"gcp-cs-secret\"" - \ | yq eval 
".spec.backup.storages.gcp-cs.gcs.endpointUrl=\"https://storage.googleapis.com\"" - \ | yq eval ".spec.backup.storages.gcp-cs.gcs.prefix=\"ps\"" - \ | yq eval ".spec.backup.storages.azure-blob.type=\"azure\"" - \ | yq eval ".spec.backup.storages.azure-blob.verifyTLS=true" - \ | yq eval ".spec.backup.storages.azure-blob.azure.containerName=\"operator-testing\"" - \ | yq eval ".spec.backup.storages.azure-blob.azure.credentialsSecret=\"azure-secret\"" - \ | yq eval ".spec.backup.storages.azure-blob.azure.prefix=\"ps\"" - \ | kubectl -n "${NAMESPACE}" apply -f -] logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + source ../../functions logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ realpath ../../.. logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++++ pwd logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++ test_name=demand-backup logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export GIT_BRANCH=PR-767 logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ GIT_BRANCH=PR-767 logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export VERSION=PR-767-8e07c66d logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ VERSION=PR-767-8e07c66d logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:44:31 | 
demand-backup/2-create-cluster | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++++ which gdate logger.go:42: 12:44:31 | demand-backup/2-create-cluster | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++++ which date logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ date=/usr/bin/date logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ command -v oc logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ kubectl get nodes logger.go:42: 12:44:31 | demand-backup/2-create-cluster | +++ grep '^minikube' logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.mysql.clusterType="async"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval .spec.mysql.size=3 - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + get_cr logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + local name_suffix= logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval .spec.orchestrator.enabled=true - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval .spec.proxy.haproxy.enabled=true - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval .spec.backup.storages.azure-blob.verifyTLS=true - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval 
'.spec.backup.storages.gcp-cs.gcs.prefix="ps"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval .spec.proxy.haproxy.size=3 - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.credentialsSecret="minio-secret"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.endpointUrl="http://minio-service.kuttl-test-gentle-eft:9000"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval .spec.orchestrator.size=3 - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.secretsName="test-secrets"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.type="s3"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.region="us-east-1"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++ printf '.spec.initImage="%s"' perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.bucket="operator-testing"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.sslSecretName="test-ssl"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.mysql.clusterType="async"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.bucket="operator-testing"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.type="s3"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.credentialsSecret="aws-s3-secret"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.upgradeOptions.apply="disabled"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.prefix="ps"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.initImage="perconalab/percona-server-mysql-operator:PR-767-8e07c66d"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval .spec.backup.storages.aws-s3.verifyTLS=true - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.type="gcs"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.type="azure"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval .spec.backup.storages.gcp-cs.verifyTLS=true - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.region="us-east-1"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.bucket="operator-testing"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++ printf '.metadata.name="%s"' demand-backup logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.endpointUrl="https://storage.googleapis.com"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.metadata.name="demand-backup"' /mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy/cr.yaml logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.credentialsSecret="gcp-cs-secret"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + '[' -n '' ']' logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval - logger.go:42: 12:44:31 | 
demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.containerName="operator-testing"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.credentialsSecret="azure-secret"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.prefix="ps"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + kubectl -n kuttl-test-gentle-eft apply -f - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:dev-latest logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.pmm.image="perconalab/pmm-client:dev-latest"' - logger.go:42: 12:44:31 | demand-backup/2-create-cluster | ++ printf '.spec.proxy.haproxy.image="%s"' perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:44:31 | demand-backup/2-create-cluster | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' - logger.go:42: 12:44:33 | demand-backup/2-create-cluster | perconaservermysql.ps.percona.com/demand-backup created logger.go:42: 12:50:01 | demand-backup/2-create-cluster | test step completed 2-create-cluster logger.go:42: 12:50:01 | demand-backup/3-write-data | starting test step 3-write-data logger.go:42: 12:50:01 | demand-backup/3-write-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" run_mysql \ "INSERT myDB.myTable (id) VALUES (100500)" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"] logger.go:42: 12:50:01 | demand-backup/3-write-data | + source ../../functions logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ realpath ../../.. 
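Step 2 builds the PerconaServerMySQL custom resource by piping deploy/cr.yaml through a long chain of yq eval filters (one per field, as the xtrace output above shows) and applying the result. A condensed sketch of the same idea, collapsing a few of those filters into a single yq expression; NAMESPACE and the field values are taken from this run and would be substituted for your own:

    # Customize deploy/cr.yaml and apply it, as step 2-create-cluster does.
    # Only a handful of the fields set above are shown; the test also configures
    # the aws-s3, gcp-cs and azure-blob storages and pins every component image.
    NAMESPACE=kuttl-test-gentle-eft
    yq eval '
      .metadata.name = "demand-backup" |
      .spec.mysql.clusterType = "async" |
      .spec.mysql.size = 3 |
      .spec.proxy.haproxy.enabled = true |
      .spec.proxy.haproxy.size = 3 |
      .spec.orchestrator.enabled = true |
      .spec.orchestrator.size = 3 |
      .spec.backup.storages.minio.type = "s3" |
      .spec.backup.storages.minio.s3.bucket = "operator-testing" |
      .spec.backup.storages.minio.s3.credentialsSecret = "minio-secret" |
      .spec.backup.storages.minio.s3.endpointUrl = "http://minio-service.'"${NAMESPACE}"':9000" |
      .spec.backup.storages.minio.s3.region = "us-east-1"
    ' deploy/cr.yaml | kubectl -n "${NAMESPACE}" apply -f -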
logger.go:42: 12:50:01 | demand-backup/3-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:50:01 | demand-backup/3-write-data | ++++ pwd logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 12:50:01 | demand-backup/3-write-data | ++ test_name=demand-backup logger.go:42: 12:50:01 | demand-backup/3-write-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:50:01 | demand-backup/3-write-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export GIT_BRANCH=PR-767 logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ GIT_BRANCH=PR-767 logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export VERSION=PR-767-8e07c66d logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ VERSION=PR-767-8e07c66d logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 12:50:01 | demand-backup/3-write-data | ++++ which gdate logger.go:42: 12:50:01 | demand-backup/3-write-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:50:01 | demand-backup/3-write-data | ++++ which date logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ date=/usr/bin/date logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ command -v oc logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ kubectl get nodes logger.go:42: 12:50:01 | demand-backup/3-write-data | +++ grep '^minikube' logger.go:42: 12:50:02 | demand-backup/3-write-data | +++ get_cluster_name logger.go:42: 12:50:02 | demand-backup/3-write-data | +++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:50:02 | demand-backup/3-write-data | ++ get_haproxy_svc demand-backup logger.go:42: 12:50:02 | demand-backup/3-write-data | ++ local cluster=demand-backup logger.go:42: 12:50:02 | demand-backup/3-write-data | ++ echo demand-backup-haproxy logger.go:42: 12:50:02 | demand-backup/3-write-data | + run_mysql 'CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:50:02 | demand-backup/3-write-data | + local 'command=CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)' logger.go:42: 12:50:02 | demand-backup/3-write-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:50:02 | demand-backup/3-write-data | + local pod= logger.go:42: 12:50:02 | demand-backup/3-write-data | ++ get_client_pod logger.go:42: 12:50:02 | demand-backup/3-write-data | ++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:50:02 | demand-backup/3-write-data | + client_pod=mysql-client logger.go:42: 12:50:02 | demand-backup/3-write-data | + wait_pod mysql-client logger.go:42: 12:50:02 | demand-backup/3-write-data | + local pod=mysql-client logger.go:42: 12:50:02 | demand-backup/3-write-data | + set +o xtrace logger.go:42: 12:50:03 | demand-backup/3-write-data | mysql-clienttrue logger.go:42: 12:50:03 | 
demand-backup/3-write-data | + sed -e 's/mysql: //' logger.go:42: 12:50:03 | demand-backup/3-write-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:50:03 | demand-backup/3-write-data | + kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:50:04 | demand-backup/3-write-data | + : logger.go:42: 12:50:04 | demand-backup/3-write-data | +++ get_cluster_name logger.go:42: 12:50:04 | demand-backup/3-write-data | +++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:50:05 | demand-backup/3-write-data | ++ get_haproxy_svc demand-backup logger.go:42: 12:50:05 | demand-backup/3-write-data | ++ local cluster=demand-backup logger.go:42: 12:50:05 | demand-backup/3-write-data | ++ echo demand-backup-haproxy logger.go:42: 12:50:05 | demand-backup/3-write-data | + run_mysql 'INSERT myDB.myTable (id) VALUES (100500)' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:50:05 | demand-backup/3-write-data | + local 'command=INSERT myDB.myTable (id) VALUES (100500)' logger.go:42: 12:50:05 | demand-backup/3-write-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:50:05 | demand-backup/3-write-data | + local pod= logger.go:42: 12:50:05 | demand-backup/3-write-data | ++ get_client_pod logger.go:42: 12:50:05 | demand-backup/3-write-data | ++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:50:05 | demand-backup/3-write-data | + client_pod=mysql-client logger.go:42: 12:50:05 | demand-backup/3-write-data | + wait_pod mysql-client logger.go:42: 12:50:05 | demand-backup/3-write-data | + local pod=mysql-client logger.go:42: 12:50:05 | demand-backup/3-write-data | + set +o xtrace logger.go:42: 12:50:06 | demand-backup/3-write-data | mysql-clienttrue logger.go:42: 12:50:06 | demand-backup/3-write-data | + sed -e 's/mysql: //' logger.go:42: 12:50:06 | demand-backup/3-write-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:50:06 | demand-backup/3-write-data | + kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "INSERT myDB.myTable (id) VALUES (100500)" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:50:07 | demand-backup/3-write-data | + : logger.go:42: 12:50:07 | demand-backup/3-write-data | test step completed 3-write-data logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | starting test step 4-move-primary-before-backup logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | running command: [sh -c set -o errexit set -o xtrace source ../../functions primary_pod_from_label="$(get_primary_from_label)" kubectl delete pod -n ${NAMESPACE} ${primary_pod_from_label} wait_cluster_consistency_async "${test_name}" "3" "3" new_primary_pod_from_label="$(get_primary_from_label)" if [ "${primary_pod_from_label}" == "${new_primary_pod_from_label}" ]; then echo "Old (${primary_pod_from_label}) and new (${new_primary_pod_from_label}) primary are the same (the failover didn't happen)!" 
exit 1 fi] logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | + source ../../functions logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ realpath ../../.. logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | ++++ pwd logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | ++ test_name=demand-backup logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export GIT_BRANCH=PR-767 logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ GIT_BRANCH=PR-767 logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export VERSION=PR-767-8e07c66d logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ VERSION=PR-767-8e07c66d logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ 
IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | ++++ which gdate logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | ++++ which date logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ date=/usr/bin/date logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ command -v oc logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ kubectl get nodes logger.go:42: 12:50:07 | demand-backup/4-move-primary-before-backup | +++ grep '^minikube' logger.go:42: 12:50:08 | demand-backup/4-move-primary-before-backup | ++ get_primary_from_label logger.go:42: 12:50:08 | demand-backup/4-move-primary-before-backup | ++ kubectl -n kuttl-test-gentle-eft get pods -l mysql.percona.com/primary=true '-ojsonpath={.items[0].metadata.name}' logger.go:42: 12:50:08 | demand-backup/4-move-primary-before-backup | + primary_pod_from_label=demand-backup-mysql-0 logger.go:42: 12:50:08 | demand-backup/4-move-primary-before-backup | + kubectl delete pod -n kuttl-test-gentle-eft demand-backup-mysql-0 logger.go:42: 12:50:09 | demand-backup/4-move-primary-before-backup | pod "demand-backup-mysql-0" deleted logger.go:42: 
12:50:29 | demand-backup/4-move-primary-before-backup | + wait_cluster_consistency_async demand-backup 3 3 logger.go:42: 12:50:29 | demand-backup/4-move-primary-before-backup | + local cluster_name=demand-backup logger.go:42: 12:50:29 | demand-backup/4-move-primary-before-backup | + local cluster_size=3 logger.go:42: 12:50:29 | demand-backup/4-move-primary-before-backup | + local orc_size=3 logger.go:42: 12:50:29 | demand-backup/4-move-primary-before-backup | + '[' -z 3 ']' logger.go:42: 12:50:29 | demand-backup/4-move-primary-before-backup | + sleep 7 logger.go:42: 12:50:36 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-gentle-eft -o 'jsonpath={.status.mysql.state}' logger.go:42: 12:50:37 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]] logger.go:42: 12:50:37 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)' logger.go:42: 12:50:37 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async) logger.go:42: 12:50:37 | demand-backup/4-move-primary-before-backup | + sleep 15 logger.go:42: 12:50:52 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-gentle-eft -o 'jsonpath={.status.mysql.state}' logger.go:42: 12:50:52 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]] logger.go:42: 12:50:52 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)' logger.go:42: 12:50:52 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async) logger.go:42: 12:50:52 | demand-backup/4-move-primary-before-backup | + sleep 15 logger.go:42: 12:51:07 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-gentle-eft -o 'jsonpath={.status.mysql.state}' logger.go:42: 12:51:08 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]] logger.go:42: 12:51:08 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)' logger.go:42: 12:51:08 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async) logger.go:42: 12:51:08 | demand-backup/4-move-primary-before-backup | + sleep 15 logger.go:42: 12:51:23 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-gentle-eft -o 'jsonpath={.status.mysql.state}' logger.go:42: 12:51:23 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]] logger.go:42: 12:51:23 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)' logger.go:42: 12:51:23 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async) logger.go:42: 12:51:23 | demand-backup/4-move-primary-before-backup | + sleep 15 logger.go:42: 12:51:38 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-gentle-eft -o 'jsonpath={.status.mysql.state}' logger.go:42: 12:51:39 | demand-backup/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]] logger.go:42: 12:51:39 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-gentle-eft -o 'jsonpath={.status.mysql.ready}' logger.go:42: 12:51:39 | demand-backup/4-move-primary-before-backup | + [[ 3 == \3 ]] logger.go:42: 12:51:39 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-gentle-eft -o 'jsonpath={.status.orchestrator.ready}' logger.go:42: 
12:51:40 | demand-backup/4-move-primary-before-backup | + [[ 3 == \3 ]] logger.go:42: 12:51:40 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-gentle-eft -o 'jsonpath={.status.orchestrator.state}' logger.go:42: 12:51:40 | demand-backup/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]] logger.go:42: 12:51:40 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-gentle-eft -o 'jsonpath={.status.state}' logger.go:42: 12:51:40 | demand-backup/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]] logger.go:42: 12:51:40 | demand-backup/4-move-primary-before-backup | ++ get_primary_from_label logger.go:42: 12:51:40 | demand-backup/4-move-primary-before-backup | ++ kubectl -n kuttl-test-gentle-eft get pods -l mysql.percona.com/primary=true '-ojsonpath={.items[0].metadata.name}' logger.go:42: 12:51:41 | demand-backup/4-move-primary-before-backup | + new_primary_pod_from_label=demand-backup-mysql-1 logger.go:42: 12:51:41 | demand-backup/4-move-primary-before-backup | + '[' demand-backup-mysql-0 == demand-backup-mysql-1 ']' logger.go:42: 12:51:41 | demand-backup/4-move-primary-before-backup | test step completed 4-move-primary-before-backup logger.go:42: 12:51:41 | demand-backup/5-create-backup-minio | starting test step 5-create-backup-minio logger.go:42: 12:51:42 | demand-backup/5-create-backup-minio | PerconaServerMySQLBackup:kuttl-test-gentle-eft/demand-backup-minio created logger.go:42: 12:51:57 | demand-backup/5-create-backup-minio | test step completed 5-create-backup-minio logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | starting test step 6-check-password-leak logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | running command: [sh -c set -o errexit set -o xtrace source ../../functions check_passwords_leak] logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | + source ../../functions logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ realpath ../../.. 
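[editor's note] The readiness polling above can be summarized as the following sketch of wait_cluster_consistency_async, reconstructed from this trace. Names and jsonpath expressions are taken from the log; the orc_size fallback is an assumption (the trace passes 3 explicitly), and this is a sketch of what the trace shows, not the operator repo's exact helper.

    # Sketch (assumed, based on the trace): poll the ps resource until MySQL
    # reports "ready", then verify replica counts and the remaining states once.
    wait_cluster_consistency_async() {
        local cluster_name=$1
        local cluster_size=$2
        local orc_size=$3
        [[ -z ${orc_size} ]] && orc_size=${cluster_size}   # fallback is an assumption

        sleep 7   # initial grace period, as in the trace
        until [[ $(kubectl get ps "${cluster_name}" -n "${NAMESPACE}" -o jsonpath='{.status.mysql.state}') == "ready" ]]; do
            echo 'waiting for cluster readiness (async)'
            sleep 15
        done
        # once MySQL reports ready, the remaining checks run once each, as above
        [[ $(kubectl get ps "${cluster_name}" -n "${NAMESPACE}" -o jsonpath='{.status.mysql.ready}') == "${cluster_size}" ]]
        [[ $(kubectl get ps "${cluster_name}" -n "${NAMESPACE}" -o jsonpath='{.status.orchestrator.ready}') == "${orc_size}" ]]
        [[ $(kubectl get ps "${cluster_name}" -n "${NAMESPACE}" -o jsonpath='{.status.orchestrator.state}') == "ready" ]]
        [[ $(kubectl get ps "${cluster_name}" -n "${NAMESPACE}" -o jsonpath='{.status.state}') == "ready" ]]
    }

The step then passes because the primary label moved from demand-backup-mysql-0 to demand-backup-mysql-1 after the pod deletion.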
logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | ++++ pwd logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | ++ test_name=demand-backup logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export GIT_BRANCH=PR-767 logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ GIT_BRANCH=PR-767 logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export VERSION=PR-767-8e07c66d logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ VERSION=PR-767-8e07c66d logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export 
IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | ++++ which gdate logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | ++++ which date logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ date=/usr/bin/date logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ command -v oc logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ kubectl get nodes logger.go:42: 12:51:57 | demand-backup/6-check-password-leak | +++ grep '^minikube' logger.go:42: 12:51:58 | demand-backup/6-check-password-leak | + check_passwords_leak logger.go:42: 12:51:58 | demand-backup/6-check-password-leak | + local secrets logger.go:42: 12:51:58 | demand-backup/6-check-password-leak | + local passwords logger.go:42: 12:51:58 | demand-backup/6-check-password-leak | + local pods logger.go:42: 12:51:58 | demand-backup/6-check-password-leak | ++ kubectl get secrets -o json logger.go:42: 12:51:58 | demand-backup/6-check-password-leak | ++ jq -r '.items[].data | to_entries | .[] | select(.key | (endswith(".crt") or endswith(".key") or endswith(".pub") or endswith(".pem") or endswith(".p12") or test("namespace")) | not) | .value' logger.go:42: 12:51:58 | demand-backup/6-check-password-leak | + secrets= logger.go:42: 12:51:58 | demand-backup/6-check-password-leak | + passwords=' ' logger.go:42: 12:51:58 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pods -o name logger.go:42: 12:51:58 | demand-backup/6-check-password-leak | ++ awk -F / '{print $2}' logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | + pods='demand-backup-haproxy-0 logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | demand-backup-haproxy-1 logger.go:42: 12:51:59 | 
demand-backup/6-check-password-leak | demand-backup-haproxy-2 logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | demand-backup-mysql-0 logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | demand-backup-mysql-1 logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | demand-backup-mysql-2 logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | demand-backup-orc-0 logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | demand-backup-orc-1 logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | demand-backup-orc-2 logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | minio-service-847fc8bb8d-xn5dz logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | mysql-client logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | xb-demand-backup-minio-minio-6mfj8' logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | + collect_logs kuttl-test-gentle-eft logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | + local containers logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | + local count logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | + NS=kuttl-test-gentle-eft logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-haproxy-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:51:59 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-haproxy-0 -c haproxy logger.go:42: 12:52:00 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-haproxy.txt logger.go:42: 12:52:00 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-haproxy.txt logger.go:42: 12:52:00 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:00 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-haproxy-0 -c mysql-monit logger.go:42: 12:52:01 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-mysql-monit.txt logger.go:42: 12:52:01 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-mysql-monit.txt logger.go:42: 12:52:01 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:52:01 | demand-backup/6-check-password-leak | logger.go:42: 12:52:01 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:52:01 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-haproxy-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:52:01 | demand-backup/6-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 12:52:01 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:01 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-haproxy-1 -c haproxy logger.go:42: 12:52:02 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-haproxy.txt logger.go:42: 12:52:02 | 
demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-haproxy.txt logger.go:42: 12:52:02 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:02 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-haproxy-1 -c mysql-monit logger.go:42: 12:52:02 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-mysql-monit.txt logger.go:42: 12:52:02 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-mysql-monit.txt logger.go:42: 12:52:02 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:52:02 | demand-backup/6-check-password-leak | logger.go:42: 12:52:02 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:52:02 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-haproxy-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:52:03 | demand-backup/6-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 12:52:03 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:03 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-haproxy-2 -c haproxy logger.go:42: 12:52:04 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-haproxy.txt logger.go:42: 12:52:04 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-haproxy.txt logger.go:42: 12:52:04 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:04 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-haproxy-2 -c mysql-monit logger.go:42: 12:52:04 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-mysql-monit.txt logger.go:42: 12:52:04 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-mysql-monit.txt logger.go:42: 12:52:04 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:52:04 | demand-backup/6-check-password-leak | logger.go:42: 12:52:04 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:52:04 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-mysql-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:52:05 | demand-backup/6-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 12:52:05 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:05 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-0 -c mysql logger.go:42: 12:52:05 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-mysql.txt logger.go:42: 12:52:05 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-mysql.txt logger.go:42: 12:52:05 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:05 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-0 -c xtrabackup logger.go:42: 12:52:06 | 
demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-xtrabackup.txt logger.go:42: 12:52:06 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-xtrabackup.txt logger.go:42: 12:52:06 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:06 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-0 -c pt-heartbeat logger.go:42: 12:52:07 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-pt-heartbeat.txt logger.go:42: 12:52:07 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-pt-heartbeat.txt logger.go:42: 12:52:07 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:52:07 | demand-backup/6-check-password-leak | logger.go:42: 12:52:07 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:52:07 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-mysql-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:52:07 | demand-backup/6-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 12:52:07 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:07 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-1 -c mysql logger.go:42: 12:52:08 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-mysql.txt logger.go:42: 12:52:08 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-mysql.txt logger.go:42: 12:52:08 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:08 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-1 -c xtrabackup logger.go:42: 12:52:08 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-xtrabackup.txt logger.go:42: 12:52:08 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-xtrabackup.txt logger.go:42: 12:52:08 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:08 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-1 -c pt-heartbeat logger.go:42: 12:52:09 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-pt-heartbeat.txt logger.go:42: 12:52:09 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-pt-heartbeat.txt logger.go:42: 12:52:09 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:52:09 | demand-backup/6-check-password-leak | logger.go:42: 12:52:09 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:52:09 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-mysql-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:52:09 | demand-backup/6-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 12:52:09 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 
12:52:09 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-2 -c mysql logger.go:42: 12:52:10 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-mysql.txt logger.go:42: 12:52:10 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-mysql.txt logger.go:42: 12:52:10 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:10 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-2 -c xtrabackup logger.go:42: 12:52:11 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-xtrabackup.txt logger.go:42: 12:52:11 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-xtrabackup.txt logger.go:42: 12:52:11 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:11 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-2 -c pt-heartbeat logger.go:42: 12:52:11 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-pt-heartbeat.txt logger.go:42: 12:52:11 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-pt-heartbeat.txt logger.go:42: 12:52:11 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:52:11 | demand-backup/6-check-password-leak | logger.go:42: 12:52:11 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:52:11 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-orc-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:52:12 | demand-backup/6-check-password-leak | + containers='orc mysql-monit' logger.go:42: 12:52:12 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:12 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-orc-0 -c orc logger.go:42: 12:52:13 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-orc.txt logger.go:42: 12:52:13 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-orc.txt logger.go:42: 12:52:13 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:13 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-orc-0 -c mysql-monit logger.go:42: 12:52:13 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-mysql-monit.txt logger.go:42: 12:52:13 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-mysql-monit.txt logger.go:42: 12:52:13 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:52:13 | demand-backup/6-check-password-leak | logger.go:42: 12:52:13 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:52:13 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-orc-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:52:14 | demand-backup/6-check-password-leak | + containers='orc mysql-monit' logger.go:42: 
12:52:14 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:14 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-orc-1 -c orc logger.go:42: 12:52:15 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-orc.txt logger.go:42: 12:52:15 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-orc.txt logger.go:42: 12:52:15 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:15 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-orc-1 -c mysql-monit logger.go:42: 12:52:15 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-mysql-monit.txt logger.go:42: 12:52:15 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-mysql-monit.txt logger.go:42: 12:52:15 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:52:15 | demand-backup/6-check-password-leak | logger.go:42: 12:52:15 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:52:15 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-orc-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:52:16 | demand-backup/6-check-password-leak | + containers='orc mysql-monit' logger.go:42: 12:52:16 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:16 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-orc-2 -c orc logger.go:42: 12:52:16 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-orc.txt logger.go:42: 12:52:16 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-orc.txt logger.go:42: 12:52:16 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:16 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-orc-2 -c mysql-monit logger.go:42: 12:52:17 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-mysql-monit.txt logger.go:42: 12:52:17 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-mysql-monit.txt logger.go:42: 12:52:17 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:52:17 | demand-backup/6-check-password-leak | logger.go:42: 12:52:17 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:52:17 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod minio-service-847fc8bb8d-xn5dz -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:52:17 | demand-backup/6-check-password-leak | + containers=minio logger.go:42: 12:52:17 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:17 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs minio-service-847fc8bb8d-xn5dz -c minio logger.go:42: 12:52:18 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-minio-service-847fc8bb8d-xn5dz-minio.txt logger.go:42: 12:52:18 | demand-backup/6-check-password-leak | logs saved in: 
/tmp/kuttl/ps/demand-backup/logs_output-minio-service-847fc8bb8d-xn5dz-minio.txt logger.go:42: 12:52:18 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:52:18 | demand-backup/6-check-password-leak | logger.go:42: 12:52:18 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:52:18 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod mysql-client -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:52:18 | demand-backup/6-check-password-leak | + containers=mysql-client logger.go:42: 12:52:18 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:18 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs mysql-client -c mysql-client logger.go:42: 12:52:19 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-mysql-client-mysql-client.txt logger.go:42: 12:52:19 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-mysql-client-mysql-client.txt logger.go:42: 12:52:19 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:52:19 | demand-backup/6-check-password-leak | logger.go:42: 12:52:19 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:52:19 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod xb-demand-backup-minio-minio-6mfj8 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:52:19 | demand-backup/6-check-password-leak | + containers=xtrabackup logger.go:42: 12:52:19 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:19 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs xb-demand-backup-minio-minio-6mfj8 -c xtrabackup logger.go:42: 12:52:20 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-demand-backup-minio-minio-6mfj8-xtrabackup.txt logger.go:42: 12:52:20 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-demand-backup-minio-minio-6mfj8-xtrabackup.txt logger.go:42: 12:52:20 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:52:20 | demand-backup/6-check-password-leak | logger.go:42: 12:52:20 | demand-backup/6-check-password-leak | + '[' -n ps-operator ']' logger.go:42: 12:52:20 | demand-backup/6-check-password-leak | ++ kubectl -n ps-operator get pods -o name logger.go:42: 12:52:20 | demand-backup/6-check-password-leak | ++ awk -F / '{print $2}' logger.go:42: 12:52:20 | demand-backup/6-check-password-leak | + pods=percona-server-mysql-operator-7b94cfff5f-v6bxh logger.go:42: 12:52:20 | demand-backup/6-check-password-leak | + collect_logs ps-operator logger.go:42: 12:52:20 | demand-backup/6-check-password-leak | + local containers logger.go:42: 12:52:20 | demand-backup/6-check-password-leak | + local count logger.go:42: 12:52:20 | demand-backup/6-check-password-leak | + NS=ps-operator logger.go:42: 12:52:20 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 12:52:20 | demand-backup/6-check-password-leak | ++ kubectl -n ps-operator get pod percona-server-mysql-operator-7b94cfff5f-v6bxh -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:52:21 | demand-backup/6-check-password-leak | + containers=manager logger.go:42: 12:52:21 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 12:52:21 | demand-backup/6-check-password-leak | + kubectl -n ps-operator logs 
percona-server-mysql-operator-7b94cfff5f-v6bxh -c manager logger.go:42: 12:52:21 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-percona-server-mysql-operator-7b94cfff5f-v6bxh-manager.txt logger.go:42: 12:52:21 | demand-backup/6-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-percona-server-mysql-operator-7b94cfff5f-v6bxh-manager.txt logger.go:42: 12:52:21 | demand-backup/6-check-password-leak | + echo logger.go:42: 12:52:21 | demand-backup/6-check-password-leak | logger.go:42: 12:52:21 | demand-backup/6-check-password-leak | test step completed 6-check-password-leak logger.go:42: 12:52:21 | demand-backup/7-delete-data | starting test step 7-delete-data logger.go:42: 12:52:21 | demand-backup/7-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 04-delete-data-minio-${i} --from-literal=data="${data}" done] logger.go:42: 12:52:21 | demand-backup/7-delete-data | + source ../../functions logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ realpath ../../.. logger.go:42: 12:52:21 | demand-backup/7-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:52:21 | demand-backup/7-delete-data | ++++ pwd logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 12:52:21 | demand-backup/7-delete-data | ++ test_name=demand-backup logger.go:42: 12:52:21 | demand-backup/7-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:52:21 | demand-backup/7-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export GIT_BRANCH=PR-767 logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ GIT_BRANCH=PR-767 logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export VERSION=PR-767-8e07c66d 
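[editor's note] The check_passwords_leak run above reduces to a per-pod, per-container log dump; a minimal sketch of the collect_logs half, reconstructed from this trace, is below. $pods, ${TEMP_DIR}, and ${NAMESPACE} are set by the caller as shown in the log; the comment about grepping saved logs for secret values is an assumption about the full helper, since this run had no non-certificate secrets to scan (secrets= above).

    # Sketch (assumed, based on the trace): dump every container log in the
    # namespace into TEMP_DIR so it can be scanned for leaked passwords.
    collect_logs() {
        local NS=$1
        local containers
        local p c

        for p in $pods; do
            containers=$(kubectl -n "${NS}" get pod "${p}" -o jsonpath='{.spec.containers[*].name}')
            for c in ${containers}; do
                kubectl -n "${NS}" logs "${p}" -c "${c}" > "${TEMP_DIR}/logs_output-${p}-${c}.txt"
                echo "logs saved in: ${TEMP_DIR}/logs_output-${p}-${c}.txt"
                # assumed: the real helper then greps each saved file for every
                # decoded secret value; nothing matched the jq filter in this run
            done
            echo
        done
    }

The same collection runs a second time against the ps-operator namespace for the manager container, as the trace shows.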
logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ VERSION=PR-767-8e07c66d logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 12:52:21 | demand-backup/7-delete-data | ++++ which gdate logger.go:42: 12:52:21 | demand-backup/7-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:52:21 | demand-backup/7-delete-data | ++++ which date logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ date=/usr/bin/date logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ command -v oc logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ kubectl get nodes logger.go:42: 12:52:21 | demand-backup/7-delete-data | +++ grep '^minikube' logger.go:42: 12:52:22 | demand-backup/7-delete-data | +++ get_cluster_name logger.go:42: 12:52:22 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' 
logger.go:42: 12:52:22 | demand-backup/7-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 12:52:22 | demand-backup/7-delete-data | ++ local cluster=demand-backup logger.go:42: 12:52:22 | demand-backup/7-delete-data | ++ echo demand-backup-haproxy logger.go:42: 12:52:22 | demand-backup/7-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:52:22 | demand-backup/7-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 12:52:22 | demand-backup/7-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:52:22 | demand-backup/7-delete-data | + local pod= logger.go:42: 12:52:22 | demand-backup/7-delete-data | ++ get_client_pod logger.go:42: 12:52:22 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:52:23 | demand-backup/7-delete-data | + client_pod=mysql-client logger.go:42: 12:52:23 | demand-backup/7-delete-data | + wait_pod mysql-client logger.go:42: 12:52:23 | demand-backup/7-delete-data | + local pod=mysql-client logger.go:42: 12:52:23 | demand-backup/7-delete-data | + set +o xtrace logger.go:42: 12:52:23 | demand-backup/7-delete-data | mysql-clienttrue logger.go:42: 12:52:23 | demand-backup/7-delete-data | + kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:52:23 | demand-backup/7-delete-data | + sed -e 's/mysql: //' logger.go:42: 12:52:23 | demand-backup/7-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:52:25 | demand-backup/7-delete-data | + : logger.go:42: 12:52:25 | demand-backup/7-delete-data | ++ get_cluster_name logger.go:42: 12:52:25 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:52:25 | demand-backup/7-delete-data | + cluster_name=demand-backup logger.go:42: 12:52:25 | demand-backup/7-delete-data | + for i in 0 1 2 logger.go:42: 12:52:25 | demand-backup/7-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:52:25 | demand-backup/7-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:52:25 | demand-backup/7-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:52:25 | demand-backup/7-delete-data | ++ local pod= logger.go:42: 12:52:25 | demand-backup/7-delete-data | +++ get_client_pod logger.go:42: 12:52:25 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:52:25 | demand-backup/7-delete-data | ++ client_pod=mysql-client logger.go:42: 12:52:25 | demand-backup/7-delete-data | ++ wait_pod mysql-client logger.go:42: 12:52:25 | demand-backup/7-delete-data | ++ local pod=mysql-client logger.go:42: 12:52:25 | demand-backup/7-delete-data | ++ set +o xtrace logger.go:42: 12:52:26 | demand-backup/7-delete-data | mysql-clienttrue logger.go:42: 12:52:26 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 
12:52:26 | demand-backup/7-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:52:26 | demand-backup/7-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:52:27 | demand-backup/7-delete-data | ++ : logger.go:42: 12:52:27 | demand-backup/7-delete-data | + data= logger.go:42: 12:52:27 | demand-backup/7-delete-data | + kubectl create configmap -n kuttl-test-gentle-eft 04-delete-data-minio-0 --from-literal=data= logger.go:42: 12:52:28 | demand-backup/7-delete-data | configmap/04-delete-data-minio-0 created logger.go:42: 12:52:28 | demand-backup/7-delete-data | + for i in 0 1 2 logger.go:42: 12:52:28 | demand-backup/7-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:52:28 | demand-backup/7-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:52:28 | demand-backup/7-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:52:28 | demand-backup/7-delete-data | ++ local pod= logger.go:42: 12:52:28 | demand-backup/7-delete-data | +++ get_client_pod logger.go:42: 12:52:28 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:52:28 | demand-backup/7-delete-data | ++ client_pod=mysql-client logger.go:42: 12:52:28 | demand-backup/7-delete-data | ++ wait_pod mysql-client logger.go:42: 12:52:28 | demand-backup/7-delete-data | ++ local pod=mysql-client logger.go:42: 12:52:28 | demand-backup/7-delete-data | ++ set +o xtrace logger.go:42: 12:52:28 | demand-backup/7-delete-data | mysql-clienttrue logger.go:42: 12:52:28 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:52:28 | demand-backup/7-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:52:28 | demand-backup/7-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
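[editor's note] The repeated exec pattern above is the run_mysql helper; a sketch reconstructed from this trace follows. The wait_pod readiness check seen in the log is omitted here for brevity, and the trailing "|| :" mirrors the ": " no-op in the trace that keeps errexit from tripping when the result set is empty (as it is after the TRUNCATE).

    # Sketch (assumed, based on the trace): run a SQL statement through the
    # long-running mysql-client pod and strip the password-on-CLI warning.
    run_mysql() {
        local command=$1
        local uri=$2
        local client_pod
        client_pod=$(kubectl -n "${NAMESPACE}" get pods --selector=name=mysql-client \
            -o jsonpath='{.items[].metadata.name}')

        kubectl -n "${NAMESPACE}" exec "${client_pod}" -- \
            bash -c "printf '%s\n' \"${command}\" | mysql -sN ${uri}" 2>&1 \
            | sed -e 's/mysql: //' \
            | grep -v 'Using a password on the command line interface can be insecure.' || :
    }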
logger.go:42: 12:52:30 | demand-backup/7-delete-data | ++ : logger.go:42: 12:52:30 | demand-backup/7-delete-data | + data= logger.go:42: 12:52:30 | demand-backup/7-delete-data | + kubectl create configmap -n kuttl-test-gentle-eft 04-delete-data-minio-1 --from-literal=data= logger.go:42: 12:52:30 | demand-backup/7-delete-data | configmap/04-delete-data-minio-1 created logger.go:42: 12:52:30 | demand-backup/7-delete-data | + for i in 0 1 2 logger.go:42: 12:52:30 | demand-backup/7-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:52:30 | demand-backup/7-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:52:30 | demand-backup/7-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:52:30 | demand-backup/7-delete-data | ++ local pod= logger.go:42: 12:52:30 | demand-backup/7-delete-data | +++ get_client_pod logger.go:42: 12:52:30 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:52:31 | demand-backup/7-delete-data | ++ client_pod=mysql-client logger.go:42: 12:52:31 | demand-backup/7-delete-data | ++ wait_pod mysql-client logger.go:42: 12:52:31 | demand-backup/7-delete-data | ++ local pod=mysql-client logger.go:42: 12:52:31 | demand-backup/7-delete-data | ++ set +o xtrace logger.go:42: 12:52:31 | demand-backup/7-delete-data | mysql-clienttrue logger.go:42: 12:52:31 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:52:31 | demand-backup/7-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:52:31 | demand-backup/7-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:52:33 | demand-backup/7-delete-data | ++ : logger.go:42: 12:52:33 | demand-backup/7-delete-data | + data= logger.go:42: 12:52:33 | demand-backup/7-delete-data | + kubectl create configmap -n kuttl-test-gentle-eft 04-delete-data-minio-2 --from-literal=data= logger.go:42: 12:52:33 | demand-backup/7-delete-data | configmap/04-delete-data-minio-2 created logger.go:42: 12:52:34 | demand-backup/7-delete-data | test step completed 7-delete-data logger.go:42: 12:52:34 | demand-backup/8-restore-from-minio | starting test step 8-restore-from-minio logger.go:42: 12:52:34 | demand-backup/8-restore-from-minio | PerconaServerMySQLRestore:kuttl-test-gentle-eft/demand-backup-restore-minio created logger.go:42: 12:57:38 | demand-backup/8-restore-from-minio | test step completed 8-restore-from-minio logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | starting test step 9-check-password-leak logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | running command: [sh -c set -o errexit set -o xtrace source ../../functions check_passwords_leak] logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | + source ../../functions logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ realpath ../../.. 
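[editor's note] The 7-delete-data step that just completed is, per the command echoed at the start of that step, this pattern: truncate the test table through the HAProxy service, then record what each replica still returns in a per-pod ConfigMap so a later assert can compare it against the post-restore contents.

    # Pattern from the 7-delete-data step (taken from the echoed test command).
    cluster_name=$(get_cluster_name)

    # wipe the table through HAProxy so the restore has something to recover
    run_mysql "TRUNCATE TABLE myDB.myTable" \
        "-h $(get_haproxy_svc "${cluster_name}") -uroot -proot_password"

    # capture the (now empty) result from every replica into a ConfigMap
    for i in 0 1 2; do
        data=$(run_mysql "SELECT * FROM myDB.myTable" \
            "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password")
        kubectl create configmap -n "${NAMESPACE}" "04-delete-data-minio-${i}" \
            --from-literal=data="${data}"
    done

Step 8 then applies the PerconaServerMySQLRestore resource (demand-backup-restore-minio) and waits for it to finish, after which the leak check in step 9 repeats the collection shown earlier, now including the xb-restore pod.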
logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | ++++ pwd logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | ++ test_name=demand-backup logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export GIT_BRANCH=PR-767 logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ GIT_BRANCH=PR-767 logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export VERSION=PR-767-8e07c66d logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ VERSION=PR-767-8e07c66d logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export 
IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | ++++ which gdate logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | ++++ which date logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ date=/usr/bin/date logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ command -v oc logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ kubectl get nodes logger.go:42: 12:57:38 | demand-backup/9-check-password-leak | +++ grep '^minikube' logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | + check_passwords_leak logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | + local secrets logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | + local passwords logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | + local pods logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | ++ kubectl get secrets -o json logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | ++ jq -r '.items[].data | to_entries | .[] | select(.key | (endswith(".crt") or endswith(".key") or endswith(".pub") or endswith(".pem") or endswith(".p12") or test("namespace")) | not) | .value' logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | + secrets= logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | + passwords=' ' logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pods -o name logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | ++ awk -F / '{print $2}' logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | + pods='demand-backup-haproxy-0 logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | demand-backup-haproxy-1 logger.go:42: 12:57:39 | 
demand-backup/9-check-password-leak | demand-backup-haproxy-2 logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | demand-backup-mysql-0 logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | demand-backup-mysql-1 logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | demand-backup-mysql-2 logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | demand-backup-orc-0 logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | demand-backup-orc-1 logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | demand-backup-orc-2 logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | minio-service-847fc8bb8d-xn5dz logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | mysql-client logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | xb-demand-backup-minio-minio-6mfj8 logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | xb-restore-demand-backup-restore-minio-99tx7' logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | + collect_logs kuttl-test-gentle-eft logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | + local containers logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | + local count logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | + NS=kuttl-test-gentle-eft logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:57:39 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-haproxy-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:57:40 | demand-backup/9-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 12:57:40 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:40 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-haproxy-0 -c haproxy logger.go:42: 12:57:41 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-haproxy.txt logger.go:42: 12:57:41 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-haproxy.txt logger.go:42: 12:57:41 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:41 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-haproxy-0 -c mysql-monit logger.go:42: 12:57:41 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-mysql-monit.txt logger.go:42: 12:57:41 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-0-mysql-monit.txt logger.go:42: 12:57:41 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:57:41 | demand-backup/9-check-password-leak | logger.go:42: 12:57:41 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:57:41 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-haproxy-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:57:42 | demand-backup/9-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 12:57:42 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:42 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-haproxy-1 -c haproxy logger.go:42: 12:57:42 | demand-backup/9-check-password-leak | + echo logs saved in: 
/tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-haproxy.txt logger.go:42: 12:57:42 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-haproxy.txt logger.go:42: 12:57:42 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:42 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-haproxy-1 -c mysql-monit logger.go:42: 12:57:43 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-mysql-monit.txt logger.go:42: 12:57:43 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-1-mysql-monit.txt logger.go:42: 12:57:43 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:57:43 | demand-backup/9-check-password-leak | logger.go:42: 12:57:43 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:57:43 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-haproxy-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:57:43 | demand-backup/9-check-password-leak | + containers='haproxy mysql-monit' logger.go:42: 12:57:43 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:43 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-haproxy-2 -c haproxy logger.go:42: 12:57:44 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-haproxy.txt logger.go:42: 12:57:44 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-haproxy.txt logger.go:42: 12:57:44 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:44 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-haproxy-2 -c mysql-monit logger.go:42: 12:57:45 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-mysql-monit.txt logger.go:42: 12:57:45 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-haproxy-2-mysql-monit.txt logger.go:42: 12:57:45 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:57:45 | demand-backup/9-check-password-leak | logger.go:42: 12:57:45 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:57:45 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-mysql-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:57:45 | demand-backup/9-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 12:57:45 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:45 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-0 -c mysql logger.go:42: 12:57:46 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-mysql.txt logger.go:42: 12:57:46 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-mysql.txt logger.go:42: 12:57:46 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:46 | demand-backup/9-check-password-leak | + kubectl -n 
kuttl-test-gentle-eft logs demand-backup-mysql-0 -c xtrabackup logger.go:42: 12:57:46 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-xtrabackup.txt logger.go:42: 12:57:46 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-xtrabackup.txt logger.go:42: 12:57:46 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:46 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-0 -c pt-heartbeat logger.go:42: 12:57:47 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-pt-heartbeat.txt logger.go:42: 12:57:47 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-0-pt-heartbeat.txt logger.go:42: 12:57:47 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:57:47 | demand-backup/9-check-password-leak | logger.go:42: 12:57:47 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:57:47 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-mysql-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:57:47 | demand-backup/9-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 12:57:47 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:47 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-1 -c mysql logger.go:42: 12:57:48 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-mysql.txt logger.go:42: 12:57:48 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-mysql.txt logger.go:42: 12:57:48 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:48 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-1 -c xtrabackup logger.go:42: 12:57:49 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-xtrabackup.txt logger.go:42: 12:57:49 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-xtrabackup.txt logger.go:42: 12:57:49 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:49 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-1 -c pt-heartbeat logger.go:42: 12:57:49 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-pt-heartbeat.txt logger.go:42: 12:57:49 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-1-pt-heartbeat.txt logger.go:42: 12:57:49 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:57:49 | demand-backup/9-check-password-leak | logger.go:42: 12:57:49 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:57:49 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-mysql-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:57:50 | demand-backup/9-check-password-leak | + containers='mysql xtrabackup pt-heartbeat' logger.go:42: 
12:57:50 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:50 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-2 -c mysql logger.go:42: 12:57:50 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-mysql.txt logger.go:42: 12:57:50 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-mysql.txt logger.go:42: 12:57:50 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:50 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-2 -c xtrabackup logger.go:42: 12:57:51 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-xtrabackup.txt logger.go:42: 12:57:51 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-xtrabackup.txt logger.go:42: 12:57:51 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:51 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-mysql-2 -c pt-heartbeat logger.go:42: 12:57:52 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-pt-heartbeat.txt logger.go:42: 12:57:52 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-mysql-2-pt-heartbeat.txt logger.go:42: 12:57:52 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:57:52 | demand-backup/9-check-password-leak | logger.go:42: 12:57:52 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:57:52 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-orc-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:57:52 | demand-backup/9-check-password-leak | + containers='orc mysql-monit' logger.go:42: 12:57:52 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:52 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-orc-0 -c orc logger.go:42: 12:57:53 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-orc.txt logger.go:42: 12:57:53 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-orc.txt logger.go:42: 12:57:53 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:53 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-orc-0 -c mysql-monit logger.go:42: 12:57:53 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-mysql-monit.txt logger.go:42: 12:57:53 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-0-mysql-monit.txt logger.go:42: 12:57:53 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:57:53 | demand-backup/9-check-password-leak | logger.go:42: 12:57:53 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:57:53 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-orc-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:57:54 | 
demand-backup/9-check-password-leak | + containers='orc mysql-monit' logger.go:42: 12:57:54 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:54 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-orc-1 -c orc logger.go:42: 12:57:55 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-orc.txt logger.go:42: 12:57:55 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-orc.txt logger.go:42: 12:57:55 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:55 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-orc-1 -c mysql-monit logger.go:42: 12:57:55 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-mysql-monit.txt logger.go:42: 12:57:55 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-1-mysql-monit.txt logger.go:42: 12:57:55 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:57:55 | demand-backup/9-check-password-leak | logger.go:42: 12:57:55 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:57:55 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod demand-backup-orc-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:57:56 | demand-backup/9-check-password-leak | + containers='orc mysql-monit' logger.go:42: 12:57:56 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:56 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-orc-2 -c orc logger.go:42: 12:57:56 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-orc.txt logger.go:42: 12:57:56 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-orc.txt logger.go:42: 12:57:56 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:56 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs demand-backup-orc-2 -c mysql-monit logger.go:42: 12:57:57 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-mysql-monit.txt logger.go:42: 12:57:57 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-demand-backup-orc-2-mysql-monit.txt logger.go:42: 12:57:57 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:57:57 | demand-backup/9-check-password-leak | logger.go:42: 12:57:57 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:57:57 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod minio-service-847fc8bb8d-xn5dz -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:57:57 | demand-backup/9-check-password-leak | + containers=minio logger.go:42: 12:57:57 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:57 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs minio-service-847fc8bb8d-xn5dz -c minio logger.go:42: 12:57:58 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-minio-service-847fc8bb8d-xn5dz-minio.txt logger.go:42: 
12:57:58 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-minio-service-847fc8bb8d-xn5dz-minio.txt logger.go:42: 12:57:58 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:57:58 | demand-backup/9-check-password-leak | logger.go:42: 12:57:58 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:57:58 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod mysql-client -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:57:58 | demand-backup/9-check-password-leak | + containers=mysql-client logger.go:42: 12:57:58 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:58 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs mysql-client -c mysql-client logger.go:42: 12:57:59 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-mysql-client-mysql-client.txt logger.go:42: 12:57:59 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-mysql-client-mysql-client.txt logger.go:42: 12:57:59 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:57:59 | demand-backup/9-check-password-leak | logger.go:42: 12:57:59 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:57:59 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod xb-demand-backup-minio-minio-6mfj8 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:57:59 | demand-backup/9-check-password-leak | + containers=xtrabackup logger.go:42: 12:57:59 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:57:59 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs xb-demand-backup-minio-minio-6mfj8 -c xtrabackup logger.go:42: 12:58:00 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-demand-backup-minio-minio-6mfj8-xtrabackup.txt logger.go:42: 12:58:00 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-demand-backup-minio-minio-6mfj8-xtrabackup.txt logger.go:42: 12:58:00 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:58:00 | demand-backup/9-check-password-leak | logger.go:42: 12:58:00 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:58:00 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-gentle-eft get pod xb-restore-demand-backup-restore-minio-99tx7 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:58:00 | demand-backup/9-check-password-leak | + containers=xtrabackup logger.go:42: 12:58:00 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:58:00 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-gentle-eft logs xb-restore-demand-backup-restore-minio-99tx7 -c xtrabackup logger.go:42: 12:58:01 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-restore-demand-backup-restore-minio-99tx7-xtrabackup.txt logger.go:42: 12:58:01 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-xb-restore-demand-backup-restore-minio-99tx7-xtrabackup.txt logger.go:42: 12:58:01 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:58:01 | demand-backup/9-check-password-leak | logger.go:42: 12:58:01 | demand-backup/9-check-password-leak | + '[' -n ps-operator ']' logger.go:42: 12:58:01 | 
demand-backup/9-check-password-leak | ++ kubectl -n ps-operator get pods -o name logger.go:42: 12:58:01 | demand-backup/9-check-password-leak | ++ awk -F / '{print $2}' logger.go:42: 12:58:01 | demand-backup/9-check-password-leak | + pods=percona-server-mysql-operator-7b94cfff5f-v6bxh logger.go:42: 12:58:01 | demand-backup/9-check-password-leak | + collect_logs ps-operator logger.go:42: 12:58:01 | demand-backup/9-check-password-leak | + local containers logger.go:42: 12:58:01 | demand-backup/9-check-password-leak | + local count logger.go:42: 12:58:01 | demand-backup/9-check-password-leak | + NS=ps-operator logger.go:42: 12:58:01 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 12:58:01 | demand-backup/9-check-password-leak | ++ kubectl -n ps-operator get pod percona-server-mysql-operator-7b94cfff5f-v6bxh -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 12:58:02 | demand-backup/9-check-password-leak | + containers=manager logger.go:42: 12:58:02 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 12:58:02 | demand-backup/9-check-password-leak | + kubectl -n ps-operator logs percona-server-mysql-operator-7b94cfff5f-v6bxh -c manager logger.go:42: 12:58:02 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-percona-server-mysql-operator-7b94cfff5f-v6bxh-manager.txt logger.go:42: 12:58:02 | demand-backup/9-check-password-leak | logs saved in: /tmp/kuttl/ps/demand-backup/logs_output-percona-server-mysql-operator-7b94cfff5f-v6bxh-manager.txt logger.go:42: 12:58:02 | demand-backup/9-check-password-leak | + echo logger.go:42: 12:58:02 | demand-backup/9-check-password-leak | logger.go:42: 12:58:02 | demand-backup/9-check-password-leak | test step completed 9-check-password-leak logger.go:42: 12:58:02 | demand-backup/10-read-data | starting test step 10-read-data logger.go:42: 12:58:02 | demand-backup/10-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-read-data-minio-${i} --from-literal=data="${data}" done] logger.go:42: 12:58:02 | demand-backup/10-read-data | + source ../../functions logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ realpath ../../.. 
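The 9-check-password-leak step above boils down to two helpers from e2e-tests/functions: check_passwords_leak pulls every non-certificate secret value in the namespace, and collect_logs dumps every container log under ${TEMP_DIR} so those values can be searched for. A condensed sketch reconstructed from the xtrace follows; the jq filter, pod/container iteration, and file naming are taken from the trace, while the redirection into the log files and the final grep of those files against the decoded secrets are assumptions (the secret list came back empty in this run, so that branch never executed).

    # Sketch only -- reconstructed from the xtrace above, not the canonical helper.
    collect_logs() {
        local ns=$1
        for p in $(kubectl -n "$ns" get pods -o name | awk -F / '{print $2}'); do
            for c in $(kubectl -n "$ns" get pod "$p" -o jsonpath='{.spec.containers[*].name}'); do
                # assumed: xtrace does not show the redirection, only the destination path
                kubectl -n "$ns" logs "$p" -c "$c" >"${TEMP_DIR}/logs_output-${p}-${c}.txt"
                echo "logs saved in: ${TEMP_DIR}/logs_output-${p}-${c}.txt"
            done
        done
    }

    check_passwords_leak() {
        local secrets
        secrets=$(kubectl get secrets -o json | jq -r '.items[].data | to_entries | .[]
            | select(.key | (endswith(".crt") or endswith(".key") or endswith(".pub")
                or endswith(".pem") or endswith(".p12") or test("namespace")) | not) | .value')
        collect_logs "${NAMESPACE}"
        # the trace shows the same collection repeated for the operator namespace (ps-operator),
        # and the saved logs would then be grepped for each decoded value -- not exercised here
        # because $secrets was empty.
    }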
logger.go:42: 12:58:02 | demand-backup/10-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:58:02 | demand-backup/10-read-data | ++++ pwd logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 12:58:02 | demand-backup/10-read-data | ++ test_name=demand-backup logger.go:42: 12:58:02 | demand-backup/10-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:58:02 | demand-backup/10-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export GIT_BRANCH=PR-767 logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ GIT_BRANCH=PR-767 logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export VERSION=PR-767-8e07c66d logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ VERSION=PR-767-8e07c66d logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 12:58:02 | demand-backup/10-read-data | ++++ which gdate logger.go:42: 12:58:02 | demand-backup/10-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:58:02 | demand-backup/10-read-data | ++++ which date logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ date=/usr/bin/date logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ command -v oc logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ kubectl get nodes logger.go:42: 12:58:02 | demand-backup/10-read-data | +++ grep '^minikube' logger.go:42: 12:58:03 | demand-backup/10-read-data | ++ get_cluster_name logger.go:42: 12:58:03 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:58:03 | demand-backup/10-read-data | + cluster_name=demand-backup logger.go:42: 12:58:03 | demand-backup/10-read-data | + for i in 0 1 2 logger.go:42: 12:58:03 | demand-backup/10-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:03 | demand-backup/10-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:58:03 | demand-backup/10-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:03 | demand-backup/10-read-data | ++ local pod= logger.go:42: 12:58:03 | demand-backup/10-read-data | +++ get_client_pod logger.go:42: 12:58:03 | demand-backup/10-read-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:58:04 | demand-backup/10-read-data | ++ client_pod=mysql-client logger.go:42: 12:58:04 | demand-backup/10-read-data | ++ wait_pod mysql-client logger.go:42: 12:58:04 | demand-backup/10-read-data | ++ local pod=mysql-client logger.go:42: 12:58:04 | demand-backup/10-read-data | ++ set +o xtrace logger.go:42: 12:58:04 | demand-backup/10-read-data | mysql-clienttrue logger.go:42: 12:58:04 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot 
-proot_password' logger.go:42: 12:58:04 | demand-backup/10-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:58:04 | demand-backup/10-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:58:06 | demand-backup/10-read-data | + data=100500 logger.go:42: 12:58:06 | demand-backup/10-read-data | + kubectl create configmap -n kuttl-test-gentle-eft 06-read-data-minio-0 --from-literal=data=100500 logger.go:42: 12:58:06 | demand-backup/10-read-data | configmap/06-read-data-minio-0 created logger.go:42: 12:58:06 | demand-backup/10-read-data | + for i in 0 1 2 logger.go:42: 12:58:06 | demand-backup/10-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:06 | demand-backup/10-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:58:06 | demand-backup/10-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:06 | demand-backup/10-read-data | ++ local pod= logger.go:42: 12:58:06 | demand-backup/10-read-data | +++ get_client_pod logger.go:42: 12:58:06 | demand-backup/10-read-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:58:06 | demand-backup/10-read-data | ++ client_pod=mysql-client logger.go:42: 12:58:06 | demand-backup/10-read-data | ++ wait_pod mysql-client logger.go:42: 12:58:06 | demand-backup/10-read-data | ++ local pod=mysql-client logger.go:42: 12:58:06 | demand-backup/10-read-data | ++ set +o xtrace logger.go:42: 12:58:07 | demand-backup/10-read-data | mysql-clienttrue logger.go:42: 12:58:07 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:07 | demand-backup/10-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:58:07 | demand-backup/10-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 12:58:08 | demand-backup/10-read-data | + data=100500 logger.go:42: 12:58:08 | demand-backup/10-read-data | + kubectl create configmap -n kuttl-test-gentle-eft 06-read-data-minio-1 --from-literal=data=100500 logger.go:42: 12:58:09 | demand-backup/10-read-data | configmap/06-read-data-minio-1 created logger.go:42: 12:58:09 | demand-backup/10-read-data | + for i in 0 1 2 logger.go:42: 12:58:09 | demand-backup/10-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:09 | demand-backup/10-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:58:09 | demand-backup/10-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:09 | demand-backup/10-read-data | ++ local pod= logger.go:42: 12:58:09 | demand-backup/10-read-data | +++ get_client_pod logger.go:42: 12:58:09 | demand-backup/10-read-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:58:09 | demand-backup/10-read-data | ++ client_pod=mysql-client logger.go:42: 12:58:09 | demand-backup/10-read-data | ++ wait_pod mysql-client logger.go:42: 12:58:09 | demand-backup/10-read-data | ++ local pod=mysql-client logger.go:42: 12:58:09 | demand-backup/10-read-data | ++ set +o xtrace logger.go:42: 12:58:09 | demand-backup/10-read-data | mysql-clienttrue logger.go:42: 12:58:09 | demand-backup/10-read-data | ++ sed -e 's/mysql: //' logger.go:42: 12:58:09 | demand-backup/10-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:58:09 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:11 | demand-backup/10-read-data | + data=100500 logger.go:42: 12:58:11 | demand-backup/10-read-data | + kubectl create configmap -n kuttl-test-gentle-eft 06-read-data-minio-2 --from-literal=data=100500 logger.go:42: 12:58:11 | demand-backup/10-read-data | configmap/06-read-data-minio-2 created logger.go:42: 12:58:12 | demand-backup/10-read-data | test step completed 10-read-data logger.go:42: 12:58:12 | demand-backup/11-delete-data | starting test step 11-delete-data logger.go:42: 12:58:12 | demand-backup/11-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2 do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 04-delete-data-minio-backup-source-${i} --from-literal=data="${data}" done] logger.go:42: 12:58:12 | demand-backup/11-delete-data | + source ../../functions logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ realpath ../../.. 
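Steps 10, 11, and 13 all go through the same run_mysql helper: the statement is piped into mysql -sN inside the long-lived mysql-client pod, the "mysql:" prefix and the insecure-password warning are stripped, and whatever is left is written into a per-replica ConfigMap for the kuttl assert to compare. A rough reconstruction from the xtrace, with two simplifications: the `local pod=` in the trace hints at an optional pod argument that is never used here, and the trailing `:` (visible as `++ :` when the result set is empty) is assumed to guard errexit.

    # Sketch only -- reconstructed from the xtrace of e2e-tests/functions.
    run_mysql() {
        local command=$1
        local uri=$2
        local client_pod
        client_pod=$(kubectl -n "${NAMESPACE}" get pods --selector=name=mysql-client \
            -o 'jsonpath={.items[].metadata.name}')
        # the real helper also waits for the client pod (wait_pod) before exec'ing
        kubectl -n "${NAMESPACE}" exec "${client_pod}" -- \
            bash -c "printf '%s\n' \"${command}\" | mysql -sN ${uri}" \
            | sed -e 's/mysql: //' \
            | grep -v 'Using a password on the command line interface can be insecure.' \
            || :
    }

    # Typical use, as in step 10:
    #   data=$(run_mysql "SELECT * FROM myDB.myTable" \
    #       "-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password")
    #   kubectl create configmap -n "${NAMESPACE}" 06-read-data-minio-0 --from-literal=data="${data}"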
logger.go:42: 12:58:12 | demand-backup/11-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:58:12 | demand-backup/11-delete-data | ++++ pwd logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 12:58:12 | demand-backup/11-delete-data | ++ test_name=demand-backup logger.go:42: 12:58:12 | demand-backup/11-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:58:12 | demand-backup/11-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export GIT_BRANCH=PR-767 logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ GIT_BRANCH=PR-767 logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export VERSION=PR-767-8e07c66d logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ VERSION=PR-767-8e07c66d logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 
12:58:12 | demand-backup/11-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 12:58:12 | demand-backup/11-delete-data | ++++ which gdate logger.go:42: 12:58:12 | demand-backup/11-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:58:12 | demand-backup/11-delete-data | ++++ which date logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ date=/usr/bin/date logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ command -v oc logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ kubectl get nodes logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ grep '^minikube' logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ get_cluster_name logger.go:42: 12:58:12 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:58:13 | demand-backup/11-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 12:58:13 | demand-backup/11-delete-data | ++ local cluster=demand-backup logger.go:42: 12:58:13 | demand-backup/11-delete-data | ++ echo demand-backup-haproxy logger.go:42: 12:58:13 | demand-backup/11-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:58:13 | demand-backup/11-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 12:58:13 | demand-backup/11-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:58:13 | demand-backup/11-delete-data | + local pod= logger.go:42: 12:58:13 | demand-backup/11-delete-data | ++ get_client_pod logger.go:42: 12:58:13 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:58:13 | demand-backup/11-delete-data | + client_pod=mysql-client logger.go:42: 12:58:13 | demand-backup/11-delete-data | + wait_pod mysql-client logger.go:42: 12:58:13 | demand-backup/11-delete-data | + local pod=mysql-client logger.go:42: 12:58:13 | demand-backup/11-delete-data | + set +o xtrace logger.go:42: 12:58:14 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 12:58:14 | demand-backup/11-delete-data | + 
sed -e 's/mysql: //' logger.go:42: 12:58:14 | demand-backup/11-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 12:58:14 | demand-backup/11-delete-data | + kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 12:58:15 | demand-backup/11-delete-data | + : logger.go:42: 12:58:15 | demand-backup/11-delete-data | ++ get_cluster_name logger.go:42: 12:58:15 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 12:58:16 | demand-backup/11-delete-data | + cluster_name=demand-backup logger.go:42: 12:58:16 | demand-backup/11-delete-data | + for i in 0 1 2 logger.go:42: 12:58:16 | demand-backup/11-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:16 | demand-backup/11-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:58:16 | demand-backup/11-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:16 | demand-backup/11-delete-data | ++ local pod= logger.go:42: 12:58:16 | demand-backup/11-delete-data | +++ get_client_pod logger.go:42: 12:58:16 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:58:16 | demand-backup/11-delete-data | ++ client_pod=mysql-client logger.go:42: 12:58:16 | demand-backup/11-delete-data | ++ wait_pod mysql-client logger.go:42: 12:58:16 | demand-backup/11-delete-data | ++ local pod=mysql-client logger.go:42: 12:58:16 | demand-backup/11-delete-data | ++ set +o xtrace logger.go:42: 12:58:16 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 12:58:16 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:16 | demand-backup/11-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:58:16 | demand-backup/11-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 12:58:18 | demand-backup/11-delete-data | ++ : logger.go:42: 12:58:18 | demand-backup/11-delete-data | + data= logger.go:42: 12:58:18 | demand-backup/11-delete-data | + kubectl create configmap -n kuttl-test-gentle-eft 04-delete-data-minio-backup-source-0 --from-literal=data= logger.go:42: 12:58:18 | demand-backup/11-delete-data | configmap/04-delete-data-minio-backup-source-0 created logger.go:42: 12:58:18 | demand-backup/11-delete-data | + for i in 0 1 2 logger.go:42: 12:58:18 | demand-backup/11-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:18 | demand-backup/11-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:58:18 | demand-backup/11-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:18 | demand-backup/11-delete-data | ++ local pod= logger.go:42: 12:58:18 | demand-backup/11-delete-data | +++ get_client_pod logger.go:42: 12:58:18 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:58:19 | demand-backup/11-delete-data | ++ client_pod=mysql-client logger.go:42: 12:58:19 | demand-backup/11-delete-data | ++ wait_pod mysql-client logger.go:42: 12:58:19 | demand-backup/11-delete-data | ++ local pod=mysql-client logger.go:42: 12:58:19 | demand-backup/11-delete-data | ++ set +o xtrace logger.go:42: 12:58:19 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 12:58:19 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:19 | demand-backup/11-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:58:19 | demand-backup/11-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 12:58:21 | demand-backup/11-delete-data | ++ : logger.go:42: 12:58:21 | demand-backup/11-delete-data | + data= logger.go:42: 12:58:21 | demand-backup/11-delete-data | + kubectl create configmap -n kuttl-test-gentle-eft 04-delete-data-minio-backup-source-1 --from-literal=data= logger.go:42: 12:58:21 | demand-backup/11-delete-data | configmap/04-delete-data-minio-backup-source-1 created logger.go:42: 12:58:21 | demand-backup/11-delete-data | + for i in 0 1 2 logger.go:42: 12:58:21 | demand-backup/11-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:21 | demand-backup/11-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 12:58:21 | demand-backup/11-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:21 | demand-backup/11-delete-data | ++ local pod= logger.go:42: 12:58:21 | demand-backup/11-delete-data | +++ get_client_pod logger.go:42: 12:58:21 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 12:58:21 | demand-backup/11-delete-data | ++ client_pod=mysql-client logger.go:42: 12:58:21 | demand-backup/11-delete-data | ++ wait_pod mysql-client logger.go:42: 12:58:21 | demand-backup/11-delete-data | ++ local pod=mysql-client logger.go:42: 12:58:21 | demand-backup/11-delete-data | ++ set +o xtrace logger.go:42: 12:58:22 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 12:58:22 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 12:58:22 | demand-backup/11-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 12:58:22 | demand-backup/11-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 12:58:23 | demand-backup/11-delete-data | ++ : logger.go:42: 12:58:23 | demand-backup/11-delete-data | + data= logger.go:42: 12:58:23 | demand-backup/11-delete-data | + kubectl create configmap -n kuttl-test-gentle-eft 04-delete-data-minio-backup-source-2 --from-literal=data= logger.go:42: 12:58:24 | demand-backup/11-delete-data | configmap/04-delete-data-minio-backup-source-2 created logger.go:42: 12:58:24 | demand-backup/11-delete-data | test step completed 11-delete-data logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | starting test step 12-restore-from-minio-backup-source logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | running command: [sh -c set -o errexit set -o xtrace source ../../functions storage_name="minio" backup_name="demand-backup-minio" restore_name="demand-backup-restore-minio-backup-source" cluster_name="${test_name}${name_suffix:+-$name_suffix}" destination=$(kubectl -n "${NAMESPACE}" get ps-backup "${backup_name}" -o jsonpath='{.status.destination}') cat "${DEPLOY_DIR}/restore.yaml" \ | yq eval "$(printf '.metadata.name="%s"' "${restore_name}")" - \ | yq eval "$(printf '.spec.clusterName="%s"' "${cluster_name}")" - \ | yq eval "del(.spec.backupName)" - \ | yq eval "$(printf '.spec.backupSource.destination="%s"' "${destination}")" - \ | yq eval '.spec.backupSource.storage.type="s3"' - \ | yq eval '.spec.backupSource.storage.s3.bucket="operator-testing"' - \ | yq eval '.spec.backupSource.storage.s3.credentialsSecret="minio-secret"' - \ | yq eval "$(printf '.spec.backupSource.storage.s3.endpointUrl="http://minio-service.%s:9000"' "${NAMESPACE}")" - \ | yq eval '.spec.backupSource.storage.s3.region="us-east-1"' - \ | kubectl apply -n "${NAMESPACE}" -f -] logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | + source ../../functions logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ realpath ../../.. 
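Step 12 is the interesting one: instead of pointing the restore at a backup by name, it deletes spec.backupName and fills in spec.backupSource from the backup object's own status.destination. The yq pipeline from the step script is repeated here unwrapped for readability; behaviour is identical, only the printf indirection is replaced by direct interpolation.

    # The step-12 pipeline unwrapped (same values; direct interpolation instead of printf).
    destination=$(kubectl -n "${NAMESPACE}" get ps-backup "${backup_name}" \
        -o jsonpath='{.status.destination}')
    cat "${DEPLOY_DIR}/restore.yaml" \
        | yq eval ".metadata.name=\"${restore_name}\"" - \
        | yq eval ".spec.clusterName=\"${cluster_name}\"" - \
        | yq eval 'del(.spec.backupName)' - \
        | yq eval ".spec.backupSource.destination=\"${destination}\"" - \
        | yq eval '.spec.backupSource.storage.type="s3"' - \
        | yq eval '.spec.backupSource.storage.s3.bucket="operator-testing"' - \
        | yq eval '.spec.backupSource.storage.s3.credentialsSecret="minio-secret"' - \
        | yq eval ".spec.backupSource.storage.s3.endpointUrl=\"http://minio-service.${NAMESPACE}:9000\"" - \
        | yq eval '.spec.backupSource.storage.s3.region="us-east-1"' - \
        | kubectl apply -n "${NAMESPACE}" -f -

In this run the apply creates perconaservermysqlrestore.ps.percona.com/demand-backup-restore-minio-backup-source, the step completes about five minutes later (12:58:26 to 13:03:37), and 13-read-data then re-reads each replica to confirm the truncated table came back.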
logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | ++++ pwd logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | ++ test_name=demand-backup logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export GIT_BRANCH=PR-767 logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ GIT_BRANCH=PR-767 logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export VERSION=PR-767-8e07c66d logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ VERSION=PR-767-8e07c66d logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup 
logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | ++++ which gdate logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | ++++ which date logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ date=/usr/bin/date logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ command -v oc logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ kubectl get nodes logger.go:42: 12:58:24 | demand-backup/12-restore-from-minio-backup-source | +++ grep '^minikube' logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + storage_name=minio logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + backup_name=demand-backup-minio logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + restore_name=demand-backup-restore-minio-backup-source logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + cluster_name=demand-backup logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | ++ kubectl -n kuttl-test-gentle-eft get 
ps-backup demand-backup-minio -o 'jsonpath={.status.destination}' logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + destination=s3://operator-testing/demand-backup-2024-11-05-12:51:42-full logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + cat /mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy/restore.yaml logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | ++ printf '.metadata.name="%s"' demand-backup-restore-minio-backup-source logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | ++ printf '.spec.clusterName="%s"' demand-backup logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.metadata.name="demand-backup-restore-minio-backup-source"' - logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.type="s3"' - logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.clusterName="demand-backup"' - logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + yq eval 'del(.spec.backupName)' - logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + kubectl apply -n kuttl-test-gentle-eft -f - logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.bucket="operator-testing"' - logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.region="us-east-1"' - logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.credentialsSecret="minio-secret"' - logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | ++ printf '.spec.backupSource.storage.s3.endpointUrl="http://minio-service.%s:9000"' kuttl-test-gentle-eft logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.endpointUrl="http://minio-service.kuttl-test-gentle-eft:9000"' - logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | ++ printf '.spec.backupSource.destination="%s"' s3://operator-testing/demand-backup-2024-11-05-12:51:42-full logger.go:42: 12:58:25 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.destination="s3://operator-testing/demand-backup-2024-11-05-12:51:42-full"' - logger.go:42: 12:58:26 | demand-backup/12-restore-from-minio-backup-source | perconaservermysqlrestore.ps.percona.com/demand-backup-restore-minio-backup-source created logger.go:42: 13:03:37 | demand-backup/12-restore-from-minio-backup-source | test step completed 12-restore-from-minio-backup-source logger.go:42: 13:03:37 | demand-backup/13-read-data | starting test step 13-read-data logger.go:42: 13:03:37 | demand-backup/13-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2 do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 09-read-data-minio-backup-source-${i} --from-literal=data="${data}" done] logger.go:42: 13:03:37 | demand-backup/13-read-data | + source ../../functions logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ realpath ../../.. 
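The restore manifest applied in step 12 is assembled by piping deploy/restore.yaml through the chain of yq expressions logged above, with the destination taken from the earlier minio backup's .status.destination. A condensed sketch of that pipeline (the function name and its parameters are illustrative, not part of the test suite):

    # Sketch: build a PerconaServerMySQLRestore that points at a backupSource
    # instead of an existing ps-backup object. The yq expressions mirror the
    # ones traced above; the wrapper function itself is hypothetical.
    restore_from_backup_source() {
        local restore_name="$1" cluster="$2" destination="$3" namespace="$4"

        cat "${DEPLOY_DIR}/restore.yaml" \
            | yq eval ".metadata.name=\"${restore_name}\"" - \
            | yq eval 'del(.spec.backupName)' - \
            | yq eval ".spec.clusterName=\"${cluster}\"" - \
            | yq eval '.spec.backupSource.storage.type="s3"' - \
            | yq eval '.spec.backupSource.storage.s3.bucket="operator-testing"' - \
            | yq eval '.spec.backupSource.storage.s3.region="us-east-1"' - \
            | yq eval '.spec.backupSource.storage.s3.credentialsSecret="minio-secret"' - \
            | yq eval ".spec.backupSource.storage.s3.endpointUrl=\"http://minio-service.${namespace}:9000\"" - \
            | yq eval ".spec.backupSource.destination=\"${destination}\"" - \
            | kubectl apply -n "${namespace}" -f -
    }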
logger.go:42: 13:03:37 | demand-backup/13-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:03:37 | demand-backup/13-read-data | ++++ pwd logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 13:03:37 | demand-backup/13-read-data | ++ test_name=demand-backup logger.go:42: 13:03:37 | demand-backup/13-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:03:37 | demand-backup/13-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export GIT_BRANCH=PR-767 logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ GIT_BRANCH=PR-767 logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export VERSION=PR-767-8e07c66d logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ VERSION=PR-767-8e07c66d logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 13:03:37 | demand-backup/13-read-data | ++++ which gdate logger.go:42: 13:03:37 | demand-backup/13-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 13:03:37 | demand-backup/13-read-data | ++++ which date logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ date=/usr/bin/date logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ command -v oc logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ kubectl get nodes logger.go:42: 13:03:37 | demand-backup/13-read-data | +++ grep '^minikube' logger.go:42: 13:03:37 | demand-backup/13-read-data | ++ get_cluster_name logger.go:42: 13:03:37 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 13:03:38 | demand-backup/13-read-data | + cluster_name=demand-backup logger.go:42: 13:03:38 | demand-backup/13-read-data | + for i in 0 1 2 logger.go:42: 13:03:38 | demand-backup/13-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:03:38 | demand-backup/13-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:03:38 | demand-backup/13-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:03:38 | demand-backup/13-read-data | ++ local pod= logger.go:42: 13:03:38 | demand-backup/13-read-data | +++ get_client_pod logger.go:42: 13:03:38 | demand-backup/13-read-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:03:38 | demand-backup/13-read-data | ++ client_pod=mysql-client logger.go:42: 13:03:38 | demand-backup/13-read-data | ++ wait_pod mysql-client logger.go:42: 13:03:38 | demand-backup/13-read-data | ++ local pod=mysql-client logger.go:42: 13:03:38 | demand-backup/13-read-data | ++ set +o xtrace logger.go:42: 13:03:38 | demand-backup/13-read-data | mysql-clienttrue logger.go:42: 13:03:38 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot 
-proot_password' logger.go:42: 13:03:38 | demand-backup/13-read-data | ++ sed -e 's/mysql: //' logger.go:42: 13:03:38 | demand-backup/13-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 13:03:40 | demand-backup/13-read-data | + data=100500 logger.go:42: 13:03:40 | demand-backup/13-read-data | + kubectl create configmap -n kuttl-test-gentle-eft 09-read-data-minio-backup-source-0 --from-literal=data=100500 logger.go:42: 13:03:40 | demand-backup/13-read-data | configmap/09-read-data-minio-backup-source-0 created logger.go:42: 13:03:40 | demand-backup/13-read-data | + for i in 0 1 2 logger.go:42: 13:03:40 | demand-backup/13-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:03:40 | demand-backup/13-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:03:40 | demand-backup/13-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:03:40 | demand-backup/13-read-data | ++ local pod= logger.go:42: 13:03:40 | demand-backup/13-read-data | +++ get_client_pod logger.go:42: 13:03:40 | demand-backup/13-read-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:03:41 | demand-backup/13-read-data | ++ client_pod=mysql-client logger.go:42: 13:03:41 | demand-backup/13-read-data | ++ wait_pod mysql-client logger.go:42: 13:03:41 | demand-backup/13-read-data | ++ local pod=mysql-client logger.go:42: 13:03:41 | demand-backup/13-read-data | ++ set +o xtrace logger.go:42: 13:03:41 | demand-backup/13-read-data | mysql-clienttrue logger.go:42: 13:03:41 | demand-backup/13-read-data | ++ sed -e 's/mysql: //' logger.go:42: 13:03:41 | demand-backup/13-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
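From the xtrace output, run_mysql resolves the mysql-client pod, waits for it, and pipes the statement into mysql -sN inside that pod, filtering out the password warning. A reconstruction based only on this trace (it may differ from the real helper in e2e-tests/functions):

    # Reconstructed from the trace above; assumes NAMESPACE is exported.
    get_client_pod() {
        kubectl -n "${NAMESPACE}" get pods \
            --selector=name=mysql-client \
            -o 'jsonpath={.items[].metadata.name}'
    }

    run_mysql() {
        local command="$1"
        local uri="$2"
        local pod
        pod=$(get_client_pod)
        # The real helper also calls wait_pod to make sure the client pod is
        # Ready; omitted here for brevity.

        kubectl -n "${NAMESPACE}" exec "${pod}" -- \
            bash -c "printf '%s\n' \"${command}\" | mysql -sN ${uri}" \
            | sed -e 's/mysql: //' \
            | grep -v 'Using a password on the command line interface can be insecure.' \
            || :   # tolerate an empty result set, as the bare ':' in the trace suggests
    }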
logger.go:42: 13:03:41 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:03:43 | demand-backup/13-read-data | + data=100500 logger.go:42: 13:03:43 | demand-backup/13-read-data | + kubectl create configmap -n kuttl-test-gentle-eft 09-read-data-minio-backup-source-1 --from-literal=data=100500 logger.go:42: 13:03:43 | demand-backup/13-read-data | configmap/09-read-data-minio-backup-source-1 created logger.go:42: 13:03:43 | demand-backup/13-read-data | + for i in 0 1 2 logger.go:42: 13:03:43 | demand-backup/13-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:03:43 | demand-backup/13-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:03:43 | demand-backup/13-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:03:43 | demand-backup/13-read-data | ++ local pod= logger.go:42: 13:03:43 | demand-backup/13-read-data | +++ get_client_pod logger.go:42: 13:03:43 | demand-backup/13-read-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:03:43 | demand-backup/13-read-data | ++ client_pod=mysql-client logger.go:42: 13:03:43 | demand-backup/13-read-data | ++ wait_pod mysql-client logger.go:42: 13:03:43 | demand-backup/13-read-data | ++ local pod=mysql-client logger.go:42: 13:03:43 | demand-backup/13-read-data | ++ set +o xtrace logger.go:42: 13:03:44 | demand-backup/13-read-data | mysql-clienttrue logger.go:42: 13:03:44 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:03:44 | demand-backup/13-read-data | ++ sed -e 's/mysql: //' logger.go:42: 13:03:44 | demand-backup/13-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
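Each pass of the loop stores whatever the replica returned into a per-pod ConfigMap, presumably so the step's kuttl assert file can compare it against the expected value. A quick manual check of those ConfigMaps (names taken from the log, the loop itself is illustrative) might be:

    # Illustrative check: every replica should report the restored row (100500).
    for i in 0 1 2; do
        kubectl get configmap "09-read-data-minio-backup-source-${i}" \
            -n kuttl-test-gentle-eft \
            -o 'jsonpath={.data.data}'
        echo
    done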
logger.go:42: 13:03:45 | demand-backup/13-read-data | + data=100500 logger.go:42: 13:03:45 | demand-backup/13-read-data | + kubectl create configmap -n kuttl-test-gentle-eft 09-read-data-minio-backup-source-2 --from-literal=data=100500 logger.go:42: 13:03:45 | demand-backup/13-read-data | configmap/09-read-data-minio-backup-source-2 created logger.go:42: 13:03:46 | demand-backup/13-read-data | test step completed 13-read-data logger.go:42: 13:03:46 | demand-backup/14-create-backup-s3 | starting test step 14-create-backup-s3 logger.go:42: 13:03:47 | demand-backup/14-create-backup-s3 | PerconaServerMySQLBackup:kuttl-test-gentle-eft/demand-backup-s3 created logger.go:42: 13:04:02 | demand-backup/14-create-backup-s3 | test step completed 14-create-backup-s3 logger.go:42: 13:04:02 | demand-backup/15-delete-data | starting test step 15-delete-data logger.go:42: 13:04:02 | demand-backup/15-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 08-delete-data-s3-${i} --from-literal=data="${data}" done] logger.go:42: 13:04:02 | demand-backup/15-delete-data | + source ../../functions logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ realpath ../../.. logger.go:42: 13:04:02 | demand-backup/15-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:04:02 | demand-backup/15-delete-data | ++++ pwd logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 13:04:02 | demand-backup/15-delete-data | ++ test_name=demand-backup logger.go:42: 13:04:02 | demand-backup/15-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:04:02 | demand-backup/15-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export GIT_BRANCH=PR-767 logger.go:42: 13:04:02 | 
demand-backup/15-delete-data | +++ GIT_BRANCH=PR-767 logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export VERSION=PR-767-8e07c66d logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ VERSION=PR-767-8e07c66d logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 13:04:02 | demand-backup/15-delete-data | ++++ which gdate logger.go:42: 13:04:02 | demand-backup/15-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 13:04:02 | demand-backup/15-delete-data | ++++ which date logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ date=/usr/bin/date logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ command -v oc logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ kubectl get nodes logger.go:42: 13:04:02 | demand-backup/15-delete-data | +++ grep '^minikube' logger.go:42: 13:04:03 | 
demand-backup/15-delete-data | +++ get_cluster_name logger.go:42: 13:04:03 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 13:04:03 | demand-backup/15-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 13:04:03 | demand-backup/15-delete-data | ++ local cluster=demand-backup logger.go:42: 13:04:03 | demand-backup/15-delete-data | ++ echo demand-backup-haproxy logger.go:42: 13:04:03 | demand-backup/15-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 13:04:03 | demand-backup/15-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 13:04:03 | demand-backup/15-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 13:04:03 | demand-backup/15-delete-data | + local pod= logger.go:42: 13:04:03 | demand-backup/15-delete-data | ++ get_client_pod logger.go:42: 13:04:03 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:04:04 | demand-backup/15-delete-data | + client_pod=mysql-client logger.go:42: 13:04:04 | demand-backup/15-delete-data | + wait_pod mysql-client logger.go:42: 13:04:04 | demand-backup/15-delete-data | + local pod=mysql-client logger.go:42: 13:04:04 | demand-backup/15-delete-data | + set +o xtrace logger.go:42: 13:04:04 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 13:04:04 | demand-backup/15-delete-data | + sed -e 's/mysql: //' logger.go:42: 13:04:04 | demand-backup/15-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 13:04:04 | demand-backup/15-delete-data | + kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 13:04:06 | demand-backup/15-delete-data | + : logger.go:42: 13:04:06 | demand-backup/15-delete-data | ++ get_cluster_name logger.go:42: 13:04:06 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 13:04:06 | demand-backup/15-delete-data | + cluster_name=demand-backup logger.go:42: 13:04:06 | demand-backup/15-delete-data | + for i in 0 1 2 logger.go:42: 13:04:06 | demand-backup/15-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:04:06 | demand-backup/15-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:04:06 | demand-backup/15-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:04:06 | demand-backup/15-delete-data | ++ local pod= logger.go:42: 13:04:06 | demand-backup/15-delete-data | +++ get_client_pod logger.go:42: 13:04:06 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:04:07 | demand-backup/15-delete-data | ++ client_pod=mysql-client logger.go:42: 13:04:07 | demand-backup/15-delete-data | ++ wait_pod mysql-client logger.go:42: 13:04:07 | demand-backup/15-delete-data | ++ local pod=mysql-client logger.go:42: 13:04:07 | demand-backup/15-delete-data | ++ set +o xtrace logger.go:42: 13:04:07 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 13:04:07 | 
demand-backup/15-delete-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:04:07 | demand-backup/15-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 13:04:07 | demand-backup/15-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 13:04:08 | demand-backup/15-delete-data | ++ : logger.go:42: 13:04:08 | demand-backup/15-delete-data | + data= logger.go:42: 13:04:08 | demand-backup/15-delete-data | + kubectl create configmap -n kuttl-test-gentle-eft 08-delete-data-s3-0 --from-literal=data= logger.go:42: 13:04:09 | demand-backup/15-delete-data | configmap/08-delete-data-s3-0 created logger.go:42: 13:04:09 | demand-backup/15-delete-data | + for i in 0 1 2 logger.go:42: 13:04:09 | demand-backup/15-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:04:09 | demand-backup/15-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:04:09 | demand-backup/15-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:04:09 | demand-backup/15-delete-data | ++ local pod= logger.go:42: 13:04:09 | demand-backup/15-delete-data | +++ get_client_pod logger.go:42: 13:04:09 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:04:09 | demand-backup/15-delete-data | ++ client_pod=mysql-client logger.go:42: 13:04:09 | demand-backup/15-delete-data | ++ wait_pod mysql-client logger.go:42: 13:04:09 | demand-backup/15-delete-data | ++ local pod=mysql-client logger.go:42: 13:04:09 | demand-backup/15-delete-data | ++ set +o xtrace logger.go:42: 13:04:10 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 13:04:10 | demand-backup/15-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 13:04:10 | demand-backup/15-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
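Note the two different endpoints in this delete-data step: the TRUNCATE is sent through the cluster-wide HAProxy service name, while the follow-up SELECTs target each replica's headless-service DNS name directly, so the delete can be confirmed on every pod. Condensed from the commands traced above:

    # Write path: the cluster-wide HAProxy service
    run_mysql "TRUNCATE TABLE myDB.myTable" \
        "-h demand-backup-haproxy -uroot -proot_password"

    # Read path: each MySQL pod individually, via the headless service
    for i in 0 1 2; do
        run_mysql "SELECT * FROM myDB.myTable" \
            "-h demand-backup-mysql-${i}.demand-backup-mysql -uroot -proot_password"
    done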
logger.go:42: 13:04:10 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:04:11 | demand-backup/15-delete-data | ++ : logger.go:42: 13:04:11 | demand-backup/15-delete-data | + data= logger.go:42: 13:04:11 | demand-backup/15-delete-data | + kubectl create configmap -n kuttl-test-gentle-eft 08-delete-data-s3-1 --from-literal=data= logger.go:42: 13:04:11 | demand-backup/15-delete-data | configmap/08-delete-data-s3-1 created logger.go:42: 13:04:11 | demand-backup/15-delete-data | + for i in 0 1 2 logger.go:42: 13:04:11 | demand-backup/15-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:04:11 | demand-backup/15-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:04:11 | demand-backup/15-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:04:11 | demand-backup/15-delete-data | ++ local pod= logger.go:42: 13:04:11 | demand-backup/15-delete-data | +++ get_client_pod logger.go:42: 13:04:11 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:04:12 | demand-backup/15-delete-data | ++ client_pod=mysql-client logger.go:42: 13:04:12 | demand-backup/15-delete-data | ++ wait_pod mysql-client logger.go:42: 13:04:12 | demand-backup/15-delete-data | ++ local pod=mysql-client logger.go:42: 13:04:12 | demand-backup/15-delete-data | ++ set +o xtrace logger.go:42: 13:04:12 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 13:04:12 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:04:12 | demand-backup/15-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 13:04:12 | demand-backup/15-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
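Before a restore can reference a backup, whether by name (as in step 16) or by destination (as in step 12), the PerconaServerMySQLBackup created in step 14 has to finish and publish where it landed. A hedged sketch of waiting for that outside of kuttl, keying only off the .status.destination field that actually appears in this log:

    # Sketch: poll the backup object until it reports a destination.
    # The exact .status.state values are not printed in this log, so the
    # loop uses .status.destination as the readiness signal instead.
    backup=demand-backup-s3
    until destination=$(kubectl -n kuttl-test-gentle-eft get ps-backup "${backup}" \
            -o 'jsonpath={.status.destination}') && [ -n "${destination}" ]; do
        sleep 5
    done
    echo "backup stored at: ${destination}"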
logger.go:42: 13:04:14 | demand-backup/15-delete-data | ++ : logger.go:42: 13:04:14 | demand-backup/15-delete-data | + data= logger.go:42: 13:04:14 | demand-backup/15-delete-data | + kubectl create configmap -n kuttl-test-gentle-eft 08-delete-data-s3-2 --from-literal=data= logger.go:42: 13:04:14 | demand-backup/15-delete-data | configmap/08-delete-data-s3-2 created logger.go:42: 13:04:15 | demand-backup/15-delete-data | test step completed 15-delete-data logger.go:42: 13:04:15 | demand-backup/16-restore-from-s3 | starting test step 16-restore-from-s3 logger.go:42: 13:04:15 | demand-backup/16-restore-from-s3 | PerconaServerMySQLRestore:kuttl-test-gentle-eft/demand-backup-restore-s3 created logger.go:42: 13:09:27 | demand-backup/16-restore-from-s3 | test step completed 16-restore-from-s3 logger.go:42: 13:09:27 | demand-backup/17-read-data | starting test step 17-read-data logger.go:42: 13:09:27 | demand-backup/17-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-read-data-s3-${i} --from-literal=data="${data}" done] logger.go:42: 13:09:27 | demand-backup/17-read-data | + source ../../functions logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ realpath ../../.. logger.go:42: 13:09:27 | demand-backup/17-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:09:27 | demand-backup/17-read-data | ++++ pwd logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 13:09:27 | demand-backup/17-read-data | ++ test_name=demand-backup logger.go:42: 13:09:27 | demand-backup/17-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:09:27 | demand-backup/17-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export GIT_BRANCH=PR-767 logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ GIT_BRANCH=PR-767 logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export VERSION=PR-767-8e07c66d 
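The restore steps in this run each take roughly five minutes (12:58 to 13:03 for the backup-source restore, 13:04 to 13:09 for the S3 one); kuttl simply waits for the step's assert to pass within its configured timeout. Outside of kuttl, an equivalent wait might poll the restore object directly (the "Succeeded" value is an assumption, it is not printed in this log):

    # Illustrative wait loop; adjust the expected state to whatever the
    # operator actually reports for a finished restore.
    restore=demand-backup-restore-s3
    until [ "$(kubectl -n kuttl-test-gentle-eft get perconaservermysqlrestore "${restore}" \
            -o 'jsonpath={.status.state}')" = "Succeeded" ]; do
        sleep 10
    done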
logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ VERSION=PR-767-8e07c66d logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 13:09:27 | demand-backup/17-read-data | ++++ which gdate logger.go:42: 13:09:27 | demand-backup/17-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 13:09:27 | demand-backup/17-read-data | ++++ which date logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ date=/usr/bin/date logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ command -v oc logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ kubectl get nodes logger.go:42: 13:09:27 | demand-backup/17-read-data | +++ grep '^minikube' logger.go:42: 13:09:27 | demand-backup/17-read-data | ++ get_cluster_name logger.go:42: 13:09:27 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 13:09:28 | 
demand-backup/17-read-data | + cluster_name=demand-backup logger.go:42: 13:09:28 | demand-backup/17-read-data | + for i in 0 1 2 logger.go:42: 13:09:28 | demand-backup/17-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:28 | demand-backup/17-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:09:28 | demand-backup/17-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:28 | demand-backup/17-read-data | ++ local pod= logger.go:42: 13:09:28 | demand-backup/17-read-data | +++ get_client_pod logger.go:42: 13:09:28 | demand-backup/17-read-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:09:28 | demand-backup/17-read-data | ++ client_pod=mysql-client logger.go:42: 13:09:28 | demand-backup/17-read-data | ++ wait_pod mysql-client logger.go:42: 13:09:28 | demand-backup/17-read-data | ++ local pod=mysql-client logger.go:42: 13:09:28 | demand-backup/17-read-data | ++ set +o xtrace logger.go:42: 13:09:28 | demand-backup/17-read-data | mysql-clienttrue logger.go:42: 13:09:28 | demand-backup/17-read-data | ++ sed -e 's/mysql: //' logger.go:42: 13:09:28 | demand-backup/17-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 13:09:28 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:30 | demand-backup/17-read-data | + data=100500 logger.go:42: 13:09:30 | demand-backup/17-read-data | + kubectl create configmap -n kuttl-test-gentle-eft 06-read-data-s3-0 --from-literal=data=100500 logger.go:42: 13:09:30 | demand-backup/17-read-data | configmap/06-read-data-s3-0 created logger.go:42: 13:09:30 | demand-backup/17-read-data | + for i in 0 1 2 logger.go:42: 13:09:30 | demand-backup/17-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:30 | demand-backup/17-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:09:30 | demand-backup/17-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:30 | demand-backup/17-read-data | ++ local pod= logger.go:42: 13:09:30 | demand-backup/17-read-data | +++ get_client_pod logger.go:42: 13:09:30 | demand-backup/17-read-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:09:31 | demand-backup/17-read-data | ++ client_pod=mysql-client logger.go:42: 13:09:31 | demand-backup/17-read-data | ++ wait_pod mysql-client logger.go:42: 13:09:31 | demand-backup/17-read-data | ++ local pod=mysql-client logger.go:42: 13:09:31 | demand-backup/17-read-data | ++ set +o xtrace logger.go:42: 13:09:31 | demand-backup/17-read-data | mysql-clienttrue logger.go:42: 13:09:31 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:31 | demand-backup/17-read-data | ++ sed -e 's/mysql: //' logger.go:42: 13:09:31 | demand-backup/17-read-data | ++ grep -v 
'Using a password on the command line interface can be insecure.' logger.go:42: 13:09:33 | demand-backup/17-read-data | + data=100500 logger.go:42: 13:09:33 | demand-backup/17-read-data | + kubectl create configmap -n kuttl-test-gentle-eft 06-read-data-s3-1 --from-literal=data=100500 logger.go:42: 13:09:33 | demand-backup/17-read-data | configmap/06-read-data-s3-1 created logger.go:42: 13:09:33 | demand-backup/17-read-data | + for i in 0 1 2 logger.go:42: 13:09:33 | demand-backup/17-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:33 | demand-backup/17-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:09:33 | demand-backup/17-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:33 | demand-backup/17-read-data | ++ local pod= logger.go:42: 13:09:33 | demand-backup/17-read-data | +++ get_client_pod logger.go:42: 13:09:33 | demand-backup/17-read-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:09:33 | demand-backup/17-read-data | ++ client_pod=mysql-client logger.go:42: 13:09:33 | demand-backup/17-read-data | ++ wait_pod mysql-client logger.go:42: 13:09:33 | demand-backup/17-read-data | ++ local pod=mysql-client logger.go:42: 13:09:33 | demand-backup/17-read-data | ++ set +o xtrace logger.go:42: 13:09:34 | demand-backup/17-read-data | mysql-clienttrue logger.go:42: 13:09:34 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:34 | demand-backup/17-read-data | ++ sed -e 's/mysql: //' logger.go:42: 13:09:34 | demand-backup/17-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 13:09:35 | demand-backup/17-read-data | + data=100500 logger.go:42: 13:09:35 | demand-backup/17-read-data | + kubectl create configmap -n kuttl-test-gentle-eft 06-read-data-s3-2 --from-literal=data=100500 logger.go:42: 13:09:36 | demand-backup/17-read-data | configmap/06-read-data-s3-2 created logger.go:42: 13:09:36 | demand-backup/17-read-data | test step completed 17-read-data logger.go:42: 13:09:36 | demand-backup/18-create-backup-gcp | starting test step 18-create-backup-gcp logger.go:42: 13:09:37 | demand-backup/18-create-backup-gcp | PerconaServerMySQLBackup:kuttl-test-gentle-eft/demand-backup-gcp created logger.go:42: 13:09:47 | demand-backup/18-create-backup-gcp | test step completed 18-create-backup-gcp logger.go:42: 13:09:47 | demand-backup/19-delete-data | starting test step 19-delete-data logger.go:42: 13:09:47 | demand-backup/19-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 12-delete-data-gcp-${i} --from-literal=data="${data}" done] logger.go:42: 13:09:47 | demand-backup/19-delete-data | + source ../../functions logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ realpath ../../.. 
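Steps 14 and 18 create the on-demand backups from pre-defined YAML files that are not echoed into this log. Based on the operator's public CR layout, the GCP backup object would look roughly like the sketch below; the apiVersion and the storageName value ("gcp-cs") are assumptions about how this test's cr.yaml names its storages, and the yq invocation assumes a v4 binary with --null-input support:

    # Sketch only: field names follow the ps.percona.com CRDs, but the
    # storageName is a guess for this particular test setup.
    yq eval -n '
        .apiVersion = "ps.percona.com/v1alpha1" |
        .kind = "PerconaServerMySQLBackup" |
        .metadata.name = "demand-backup-gcp" |
        .spec.clusterName = "demand-backup" |
        .spec.storageName = "gcp-cs"
    ' | kubectl apply -n kuttl-test-gentle-eft -f -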
logger.go:42: 13:09:47 | demand-backup/19-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:09:47 | demand-backup/19-delete-data | ++++ pwd logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 13:09:47 | demand-backup/19-delete-data | ++ test_name=demand-backup logger.go:42: 13:09:47 | demand-backup/19-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:09:47 | demand-backup/19-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export GIT_BRANCH=PR-767 logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ GIT_BRANCH=PR-767 logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export VERSION=PR-767-8e07c66d logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ VERSION=PR-767-8e07c66d logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 
13:09:47 | demand-backup/19-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 13:09:47 | demand-backup/19-delete-data | ++++ which gdate logger.go:42: 13:09:47 | demand-backup/19-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 13:09:47 | demand-backup/19-delete-data | ++++ which date logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ date=/usr/bin/date logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ command -v oc logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ kubectl get nodes logger.go:42: 13:09:47 | demand-backup/19-delete-data | +++ grep '^minikube' logger.go:42: 13:09:48 | demand-backup/19-delete-data | +++ get_cluster_name logger.go:42: 13:09:48 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 13:09:48 | demand-backup/19-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 13:09:48 | demand-backup/19-delete-data | ++ local cluster=demand-backup logger.go:42: 13:09:48 | demand-backup/19-delete-data | ++ echo demand-backup-haproxy logger.go:42: 13:09:48 | demand-backup/19-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 13:09:48 | demand-backup/19-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 13:09:48 | demand-backup/19-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 13:09:48 | demand-backup/19-delete-data | + local pod= logger.go:42: 13:09:48 | demand-backup/19-delete-data | ++ get_client_pod logger.go:42: 13:09:48 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:09:49 | demand-backup/19-delete-data | + client_pod=mysql-client logger.go:42: 13:09:49 | demand-backup/19-delete-data | + wait_pod mysql-client logger.go:42: 13:09:49 | demand-backup/19-delete-data | + local pod=mysql-client logger.go:42: 13:09:49 | demand-backup/19-delete-data | + set +o xtrace logger.go:42: 13:09:49 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 13:09:49 | demand-backup/19-delete-data | + 
kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 13:09:49 | demand-backup/19-delete-data | + sed -e 's/mysql: //' logger.go:42: 13:09:49 | demand-backup/19-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 13:09:50 | demand-backup/19-delete-data | + : logger.go:42: 13:09:50 | demand-backup/19-delete-data | ++ get_cluster_name logger.go:42: 13:09:50 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 13:09:51 | demand-backup/19-delete-data | + cluster_name=demand-backup logger.go:42: 13:09:51 | demand-backup/19-delete-data | + for i in 0 1 2 logger.go:42: 13:09:51 | demand-backup/19-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:51 | demand-backup/19-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:09:51 | demand-backup/19-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:51 | demand-backup/19-delete-data | ++ local pod= logger.go:42: 13:09:51 | demand-backup/19-delete-data | +++ get_client_pod logger.go:42: 13:09:51 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:09:51 | demand-backup/19-delete-data | ++ client_pod=mysql-client logger.go:42: 13:09:51 | demand-backup/19-delete-data | ++ wait_pod mysql-client logger.go:42: 13:09:51 | demand-backup/19-delete-data | ++ local pod=mysql-client logger.go:42: 13:09:51 | demand-backup/19-delete-data | ++ set +o xtrace logger.go:42: 13:09:52 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 13:09:52 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:52 | demand-backup/19-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 13:09:52 | demand-backup/19-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 13:09:53 | demand-backup/19-delete-data | ++ : logger.go:42: 13:09:53 | demand-backup/19-delete-data | + data= logger.go:42: 13:09:53 | demand-backup/19-delete-data | + kubectl create configmap -n kuttl-test-gentle-eft 12-delete-data-gcp-0 --from-literal=data= logger.go:42: 13:09:54 | demand-backup/19-delete-data | configmap/12-delete-data-gcp-0 created logger.go:42: 13:09:54 | demand-backup/19-delete-data | + for i in 0 1 2 logger.go:42: 13:09:54 | demand-backup/19-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:54 | demand-backup/19-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:09:54 | demand-backup/19-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:54 | demand-backup/19-delete-data | ++ local pod= logger.go:42: 13:09:54 | demand-backup/19-delete-data | +++ get_client_pod logger.go:42: 13:09:54 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:09:54 | demand-backup/19-delete-data | ++ client_pod=mysql-client logger.go:42: 13:09:54 | demand-backup/19-delete-data | ++ wait_pod mysql-client logger.go:42: 13:09:54 | demand-backup/19-delete-data | ++ local pod=mysql-client logger.go:42: 13:09:54 | demand-backup/19-delete-data | ++ set +o xtrace logger.go:42: 13:09:54 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 13:09:54 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:54 | demand-backup/19-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 13:09:54 | demand-backup/19-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 13:09:56 | demand-backup/19-delete-data | ++ : logger.go:42: 13:09:56 | demand-backup/19-delete-data | + data= logger.go:42: 13:09:56 | demand-backup/19-delete-data | + kubectl create configmap -n kuttl-test-gentle-eft 12-delete-data-gcp-1 --from-literal=data= logger.go:42: 13:09:56 | demand-backup/19-delete-data | configmap/12-delete-data-gcp-1 created logger.go:42: 13:09:56 | demand-backup/19-delete-data | + for i in 0 1 2 logger.go:42: 13:09:56 | demand-backup/19-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:56 | demand-backup/19-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:09:56 | demand-backup/19-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:56 | demand-backup/19-delete-data | ++ local pod= logger.go:42: 13:09:56 | demand-backup/19-delete-data | +++ get_client_pod logger.go:42: 13:09:56 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:09:57 | demand-backup/19-delete-data | ++ client_pod=mysql-client logger.go:42: 13:09:57 | demand-backup/19-delete-data | ++ wait_pod mysql-client logger.go:42: 13:09:57 | demand-backup/19-delete-data | ++ local pod=mysql-client logger.go:42: 13:09:57 | demand-backup/19-delete-data | ++ set +o xtrace logger.go:42: 13:09:57 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 13:09:57 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:09:57 | demand-backup/19-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 13:09:57 | demand-backup/19-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 13:09:58 | demand-backup/19-delete-data | ++ : logger.go:42: 13:09:58 | demand-backup/19-delete-data | + data= logger.go:42: 13:09:58 | demand-backup/19-delete-data | + kubectl create configmap -n kuttl-test-gentle-eft 12-delete-data-gcp-2 --from-literal=data= logger.go:42: 13:09:59 | demand-backup/19-delete-data | configmap/12-delete-data-gcp-2 created logger.go:42: 13:10:00 | demand-backup/19-delete-data | test step completed 19-delete-data logger.go:42: 13:10:00 | demand-backup/20-restore-from-gcp | starting test step 20-restore-from-gcp logger.go:42: 13:10:00 | demand-backup/20-restore-from-gcp | PerconaServerMySQLRestore:kuttl-test-gentle-eft/demand-backup-restore-gcp created logger.go:42: 13:15:09 | demand-backup/20-restore-from-gcp | test step completed 20-restore-from-gcp logger.go:42: 13:15:09 | demand-backup/21-read-data | starting test step 21-read-data logger.go:42: 13:15:09 | demand-backup/21-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-read-data-gcp-${i} --from-literal=data="${data}" done] logger.go:42: 13:15:09 | demand-backup/21-read-data | + source ../../functions logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ realpath ../../.. 
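By this point the same truncate-restore-verify cycle has run against the minio backup (via its destination), the S3 backup, and the GCP backup, with the GCP restore in step 20 again taking about five minutes. A quick way to list the backups created so far and where each one landed (the NAME and DESTINATION paths mirror fields used earlier in this log; the STORAGE column assumes the CR carries a spec.storageName field):

    kubectl -n kuttl-test-gentle-eft get ps-backup \
        -o custom-columns=NAME:.metadata.name,STORAGE:.spec.storageName,DESTINATION:.status.destination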
logger.go:42: 13:15:09 | demand-backup/21-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:15:09 | demand-backup/21-read-data | ++++ pwd logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 13:15:09 | demand-backup/21-read-data | ++ test_name=demand-backup logger.go:42: 13:15:09 | demand-backup/21-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:15:09 | demand-backup/21-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export GIT_BRANCH=PR-767 logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ GIT_BRANCH=PR-767 logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export VERSION=PR-767-8e07c66d logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ VERSION=PR-767-8e07c66d logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 13:15:09 | demand-backup/21-read-data | ++++ which gdate logger.go:42: 13:15:09 | demand-backup/21-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 13:15:09 | demand-backup/21-read-data | ++++ which date logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ date=/usr/bin/date logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ command -v oc logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ kubectl get nodes logger.go:42: 13:15:09 | demand-backup/21-read-data | +++ grep '^minikube' logger.go:42: 13:15:09 | demand-backup/21-read-data | ++ get_cluster_name logger.go:42: 13:15:09 | demand-backup/21-read-data | ++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 13:15:10 | demand-backup/21-read-data | + cluster_name=demand-backup logger.go:42: 13:15:10 | demand-backup/21-read-data | + for i in 0 1 2 logger.go:42: 13:15:10 | demand-backup/21-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:10 | demand-backup/21-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:15:10 | demand-backup/21-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:10 | demand-backup/21-read-data | ++ local pod= logger.go:42: 13:15:10 | demand-backup/21-read-data | +++ get_client_pod logger.go:42: 13:15:10 | demand-backup/21-read-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:15:10 | demand-backup/21-read-data | ++ client_pod=mysql-client logger.go:42: 13:15:10 | demand-backup/21-read-data | ++ wait_pod mysql-client logger.go:42: 13:15:10 | demand-backup/21-read-data | ++ local pod=mysql-client logger.go:42: 13:15:10 | demand-backup/21-read-data | ++ set +o xtrace logger.go:42: 13:15:10 | demand-backup/21-read-data | mysql-clienttrue logger.go:42: 13:15:10 | demand-backup/21-read-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot 
-proot_password' logger.go:42: 13:15:10 | demand-backup/21-read-data | ++ sed -e 's/mysql: //' logger.go:42: 13:15:10 | demand-backup/21-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 13:15:12 | demand-backup/21-read-data | + data=100500 logger.go:42: 13:15:12 | demand-backup/21-read-data | + kubectl create configmap -n kuttl-test-gentle-eft 06-read-data-gcp-0 --from-literal=data=100500 logger.go:42: 13:15:12 | demand-backup/21-read-data | configmap/06-read-data-gcp-0 created logger.go:42: 13:15:12 | demand-backup/21-read-data | + for i in 0 1 2 logger.go:42: 13:15:12 | demand-backup/21-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:12 | demand-backup/21-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:15:12 | demand-backup/21-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:12 | demand-backup/21-read-data | ++ local pod= logger.go:42: 13:15:12 | demand-backup/21-read-data | +++ get_client_pod logger.go:42: 13:15:12 | demand-backup/21-read-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:15:13 | demand-backup/21-read-data | ++ client_pod=mysql-client logger.go:42: 13:15:13 | demand-backup/21-read-data | ++ wait_pod mysql-client logger.go:42: 13:15:13 | demand-backup/21-read-data | ++ local pod=mysql-client logger.go:42: 13:15:13 | demand-backup/21-read-data | ++ set +o xtrace logger.go:42: 13:15:13 | demand-backup/21-read-data | mysql-clienttrue logger.go:42: 13:15:13 | demand-backup/21-read-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:13 | demand-backup/21-read-data | ++ sed -e 's/mysql: //' logger.go:42: 13:15:13 | demand-backup/21-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 13:15:15 | demand-backup/21-read-data | + data=100500 logger.go:42: 13:15:15 | demand-backup/21-read-data | + kubectl create configmap -n kuttl-test-gentle-eft 06-read-data-gcp-1 --from-literal=data=100500 logger.go:42: 13:15:15 | demand-backup/21-read-data | configmap/06-read-data-gcp-1 created logger.go:42: 13:15:15 | demand-backup/21-read-data | + for i in 0 1 2 logger.go:42: 13:15:15 | demand-backup/21-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:15 | demand-backup/21-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:15:15 | demand-backup/21-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:15 | demand-backup/21-read-data | ++ local pod= logger.go:42: 13:15:15 | demand-backup/21-read-data | +++ get_client_pod logger.go:42: 13:15:15 | demand-backup/21-read-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:15:15 | demand-backup/21-read-data | ++ client_pod=mysql-client logger.go:42: 13:15:15 | demand-backup/21-read-data | ++ wait_pod mysql-client logger.go:42: 13:15:15 | demand-backup/21-read-data | ++ local pod=mysql-client logger.go:42: 13:15:15 | demand-backup/21-read-data | ++ set +o xtrace logger.go:42: 13:15:16 | demand-backup/21-read-data | mysql-clienttrue logger.go:42: 13:15:16 | demand-backup/21-read-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:16 | demand-backup/21-read-data | ++ sed -e 's/mysql: //' logger.go:42: 13:15:16 | demand-backup/21-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 13:15:17 | demand-backup/21-read-data | + data=100500 logger.go:42: 13:15:17 | demand-backup/21-read-data | + kubectl create configmap -n kuttl-test-gentle-eft 06-read-data-gcp-2 --from-literal=data=100500 logger.go:42: 13:15:17 | demand-backup/21-read-data | configmap/06-read-data-gcp-2 created logger.go:42: 13:15:18 | demand-backup/21-read-data | test step completed 21-read-data logger.go:42: 13:15:18 | demand-backup/22-create-backup-azure | starting test step 22-create-backup-azure logger.go:42: 13:15:19 | demand-backup/22-create-backup-azure | PerconaServerMySQLBackup:kuttl-test-gentle-eft/demand-backup-azure created logger.go:42: 13:15:29 | demand-backup/22-create-backup-azure | test step completed 22-create-backup-azure logger.go:42: 13:15:29 | demand-backup/23-delete-data | starting test step 23-delete-data logger.go:42: 13:15:29 | demand-backup/23-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 16-delete-data-azure-${i} --from-literal=data="${data}" done] logger.go:42: 13:15:29 | demand-backup/23-delete-data | + source ../../functions logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ realpath ../../.. 
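Step 23-delete-data mirrors the earlier delete steps: it truncates the test table once through the haproxy service, then confirms on every mysql pod that the table is empty before the Azure restore. Reflowed from the command logged above:

    set -o errexit
    set -o xtrace
    source ../../functions

    # Truncate once through the haproxy service, which routes the write to the current primary.
    run_mysql "TRUNCATE TABLE myDB.myTable" \
        "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"

    cluster_name=$(get_cluster_name)
    for i in 0 1 2; do
        # Every pod is expected to return an empty result; the (empty) value is recorded for the assert.
        data=$(run_mysql "SELECT * FROM myDB.myTable" \
            "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password")
        kubectl create configmap -n "${NAMESPACE}" 16-delete-data-azure-${i} --from-literal=data="${data}"
    done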
logger.go:42: 13:15:29 | demand-backup/23-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:15:29 | demand-backup/23-delete-data | ++++ pwd logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 13:15:29 | demand-backup/23-delete-data | ++ test_name=demand-backup logger.go:42: 13:15:29 | demand-backup/23-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:15:29 | demand-backup/23-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export GIT_BRANCH=PR-767 logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ GIT_BRANCH=PR-767 logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export VERSION=PR-767-8e07c66d logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ VERSION=PR-767-8e07c66d logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 
13:15:29 | demand-backup/23-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 13:15:29 | demand-backup/23-delete-data | ++++ which gdate logger.go:42: 13:15:29 | demand-backup/23-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 13:15:29 | demand-backup/23-delete-data | ++++ which date logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ date=/usr/bin/date logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ command -v oc logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ kubectl get nodes logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ grep '^minikube' logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ get_cluster_name logger.go:42: 13:15:29 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 13:15:30 | demand-backup/23-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 13:15:30 | demand-backup/23-delete-data | ++ local cluster=demand-backup logger.go:42: 13:15:30 | demand-backup/23-delete-data | ++ echo demand-backup-haproxy logger.go:42: 13:15:30 | demand-backup/23-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 13:15:30 | demand-backup/23-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 13:15:30 | demand-backup/23-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 13:15:30 | demand-backup/23-delete-data | + local pod= logger.go:42: 13:15:30 | demand-backup/23-delete-data | ++ get_client_pod logger.go:42: 13:15:30 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:15:30 | demand-backup/23-delete-data | + client_pod=mysql-client logger.go:42: 13:15:30 | demand-backup/23-delete-data | + wait_pod mysql-client logger.go:42: 13:15:30 | demand-backup/23-delete-data | + local pod=mysql-client logger.go:42: 13:15:30 | demand-backup/23-delete-data | + set +o xtrace logger.go:42: 13:15:31 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 13:15:31 | demand-backup/23-delete-data | + 
kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 13:15:31 | demand-backup/23-delete-data | + sed -e 's/mysql: //' logger.go:42: 13:15:31 | demand-backup/23-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 13:15:32 | demand-backup/23-delete-data | + : logger.go:42: 13:15:32 | demand-backup/23-delete-data | ++ get_cluster_name logger.go:42: 13:15:32 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 13:15:33 | demand-backup/23-delete-data | + cluster_name=demand-backup logger.go:42: 13:15:33 | demand-backup/23-delete-data | + for i in 0 1 2 logger.go:42: 13:15:33 | demand-backup/23-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:33 | demand-backup/23-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:15:33 | demand-backup/23-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:33 | demand-backup/23-delete-data | ++ local pod= logger.go:42: 13:15:33 | demand-backup/23-delete-data | +++ get_client_pod logger.go:42: 13:15:33 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:15:33 | demand-backup/23-delete-data | ++ client_pod=mysql-client logger.go:42: 13:15:33 | demand-backup/23-delete-data | ++ wait_pod mysql-client logger.go:42: 13:15:33 | demand-backup/23-delete-data | ++ local pod=mysql-client logger.go:42: 13:15:33 | demand-backup/23-delete-data | ++ set +o xtrace logger.go:42: 13:15:33 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 13:15:33 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:33 | demand-backup/23-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 13:15:33 | demand-backup/23-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 13:15:35 | demand-backup/23-delete-data | ++ : logger.go:42: 13:15:35 | demand-backup/23-delete-data | + data= logger.go:42: 13:15:35 | demand-backup/23-delete-data | + kubectl create configmap -n kuttl-test-gentle-eft 16-delete-data-azure-0 --from-literal=data= logger.go:42: 13:15:35 | demand-backup/23-delete-data | configmap/16-delete-data-azure-0 created logger.go:42: 13:15:35 | demand-backup/23-delete-data | + for i in 0 1 2 logger.go:42: 13:15:35 | demand-backup/23-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:35 | demand-backup/23-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:15:35 | demand-backup/23-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:35 | demand-backup/23-delete-data | ++ local pod= logger.go:42: 13:15:35 | demand-backup/23-delete-data | +++ get_client_pod logger.go:42: 13:15:35 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:15:36 | demand-backup/23-delete-data | ++ client_pod=mysql-client logger.go:42: 13:15:36 | demand-backup/23-delete-data | ++ wait_pod mysql-client logger.go:42: 13:15:36 | demand-backup/23-delete-data | ++ local pod=mysql-client logger.go:42: 13:15:36 | demand-backup/23-delete-data | ++ set +o xtrace logger.go:42: 13:15:36 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 13:15:36 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:36 | demand-backup/23-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 13:15:36 | demand-backup/23-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 13:15:38 | demand-backup/23-delete-data | ++ : logger.go:42: 13:15:38 | demand-backup/23-delete-data | + data= logger.go:42: 13:15:38 | demand-backup/23-delete-data | + kubectl create configmap -n kuttl-test-gentle-eft 16-delete-data-azure-1 --from-literal=data= logger.go:42: 13:15:38 | demand-backup/23-delete-data | configmap/16-delete-data-azure-1 created logger.go:42: 13:15:38 | demand-backup/23-delete-data | + for i in 0 1 2 logger.go:42: 13:15:38 | demand-backup/23-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:38 | demand-backup/23-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:15:38 | demand-backup/23-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:38 | demand-backup/23-delete-data | ++ local pod= logger.go:42: 13:15:38 | demand-backup/23-delete-data | +++ get_client_pod logger.go:42: 13:15:38 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:15:38 | demand-backup/23-delete-data | ++ client_pod=mysql-client logger.go:42: 13:15:38 | demand-backup/23-delete-data | ++ wait_pod mysql-client logger.go:42: 13:15:38 | demand-backup/23-delete-data | ++ local pod=mysql-client logger.go:42: 13:15:38 | demand-backup/23-delete-data | ++ set +o xtrace logger.go:42: 13:15:39 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 13:15:39 | demand-backup/23-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 13:15:39 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:15:39 | demand-backup/23-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 13:15:40 | demand-backup/23-delete-data | ++ : logger.go:42: 13:15:40 | demand-backup/23-delete-data | + data= logger.go:42: 13:15:40 | demand-backup/23-delete-data | + kubectl create configmap -n kuttl-test-gentle-eft 16-delete-data-azure-2 --from-literal=data= logger.go:42: 13:15:41 | demand-backup/23-delete-data | configmap/16-delete-data-azure-2 created logger.go:42: 13:15:41 | demand-backup/23-delete-data | test step completed 23-delete-data logger.go:42: 13:15:41 | demand-backup/24-restore-from-azure | starting test step 24-restore-from-azure logger.go:42: 13:15:42 | demand-backup/24-restore-from-azure | PerconaServerMySQLRestore:kuttl-test-gentle-eft/demand-backup-restore-azure created logger.go:42: 13:21:01 | demand-backup/24-restore-from-azure | test step completed 24-restore-from-azure logger.go:42: 13:21:01 | demand-backup/25-read-data | starting test step 25-read-data logger.go:42: 13:21:01 | demand-backup/25-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-read-data-azure-${i} --from-literal=data="${data}" done] logger.go:42: 13:21:01 | demand-backup/25-read-data | + source ../../functions logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ realpath ../../.. 
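Step 25-read-data runs the same loop as 21-read-data, this time writing 06-read-data-azure-* configmaps. The check can also be reproduced by hand against this run's namespace with the exact command the helper executes, for example against the first pod:

    kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c \
        'printf "%s\n" "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password'
    # A successful restore prints the test value: 100500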
logger.go:42: 13:21:01 | demand-backup/25-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:21:01 | demand-backup/25-read-data | ++++ pwd logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 13:21:01 | demand-backup/25-read-data | ++ test_name=demand-backup logger.go:42: 13:21:01 | demand-backup/25-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:21:01 | demand-backup/25-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export GIT_BRANCH=PR-767 logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ GIT_BRANCH=PR-767 logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export VERSION=PR-767-8e07c66d logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ VERSION=PR-767-8e07c66d logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 13:21:01 | demand-backup/25-read-data | ++++ which gdate logger.go:42: 13:21:01 | demand-backup/25-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 13:21:01 | demand-backup/25-read-data | ++++ which date logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ date=/usr/bin/date logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ command -v oc logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ kubectl get nodes logger.go:42: 13:21:01 | demand-backup/25-read-data | +++ grep '^minikube' logger.go:42: 13:21:01 | demand-backup/25-read-data | ++ get_cluster_name logger.go:42: 13:21:01 | demand-backup/25-read-data | ++ kubectl -n kuttl-test-gentle-eft get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 13:21:02 | demand-backup/25-read-data | + cluster_name=demand-backup logger.go:42: 13:21:02 | demand-backup/25-read-data | + for i in 0 1 2 logger.go:42: 13:21:02 | demand-backup/25-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:21:02 | demand-backup/25-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:21:02 | demand-backup/25-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:21:02 | demand-backup/25-read-data | ++ local pod= logger.go:42: 13:21:02 | demand-backup/25-read-data | +++ get_client_pod logger.go:42: 13:21:02 | demand-backup/25-read-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:21:02 | demand-backup/25-read-data | ++ client_pod=mysql-client logger.go:42: 13:21:02 | demand-backup/25-read-data | ++ wait_pod mysql-client logger.go:42: 13:21:02 | demand-backup/25-read-data | ++ local pod=mysql-client logger.go:42: 13:21:02 | demand-backup/25-read-data | ++ set +o xtrace logger.go:42: 13:21:03 | demand-backup/25-read-data | mysql-clienttrue logger.go:42: 13:21:03 | demand-backup/25-read-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot 
-proot_password' logger.go:42: 13:21:03 | demand-backup/25-read-data | ++ sed -e 's/mysql: //' logger.go:42: 13:21:03 | demand-backup/25-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 13:21:04 | demand-backup/25-read-data | + data=100500 logger.go:42: 13:21:04 | demand-backup/25-read-data | + kubectl create configmap -n kuttl-test-gentle-eft 06-read-data-azure-0 --from-literal=data=100500 logger.go:42: 13:21:05 | demand-backup/25-read-data | configmap/06-read-data-azure-0 created logger.go:42: 13:21:05 | demand-backup/25-read-data | + for i in 0 1 2 logger.go:42: 13:21:05 | demand-backup/25-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:21:05 | demand-backup/25-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:21:05 | demand-backup/25-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:21:05 | demand-backup/25-read-data | ++ local pod= logger.go:42: 13:21:05 | demand-backup/25-read-data | +++ get_client_pod logger.go:42: 13:21:05 | demand-backup/25-read-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:21:05 | demand-backup/25-read-data | ++ client_pod=mysql-client logger.go:42: 13:21:05 | demand-backup/25-read-data | ++ wait_pod mysql-client logger.go:42: 13:21:05 | demand-backup/25-read-data | ++ local pod=mysql-client logger.go:42: 13:21:05 | demand-backup/25-read-data | ++ set +o xtrace logger.go:42: 13:21:05 | demand-backup/25-read-data | mysql-clienttrue logger.go:42: 13:21:05 | demand-backup/25-read-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:21:05 | demand-backup/25-read-data | ++ sed -e 's/mysql: //' logger.go:42: 13:21:05 | demand-backup/25-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 13:21:07 | demand-backup/25-read-data | + data=100500 logger.go:42: 13:21:07 | demand-backup/25-read-data | + kubectl create configmap -n kuttl-test-gentle-eft 06-read-data-azure-1 --from-literal=data=100500 logger.go:42: 13:21:07 | demand-backup/25-read-data | configmap/06-read-data-azure-1 created logger.go:42: 13:21:07 | demand-backup/25-read-data | + for i in 0 1 2 logger.go:42: 13:21:07 | demand-backup/25-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:21:07 | demand-backup/25-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 13:21:07 | demand-backup/25-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:21:07 | demand-backup/25-read-data | ++ local pod= logger.go:42: 13:21:07 | demand-backup/25-read-data | +++ get_client_pod logger.go:42: 13:21:07 | demand-backup/25-read-data | +++ kubectl -n kuttl-test-gentle-eft get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 13:21:08 | demand-backup/25-read-data | ++ client_pod=mysql-client logger.go:42: 13:21:08 | demand-backup/25-read-data | ++ wait_pod mysql-client logger.go:42: 13:21:08 | demand-backup/25-read-data | ++ local pod=mysql-client logger.go:42: 13:21:08 | demand-backup/25-read-data | ++ set +o xtrace logger.go:42: 13:21:08 | demand-backup/25-read-data | mysql-clienttrue logger.go:42: 13:21:08 | demand-backup/25-read-data | ++ sed -e 's/mysql: //' logger.go:42: 13:21:08 | demand-backup/25-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 13:21:08 | demand-backup/25-read-data | ++ kubectl -n kuttl-test-gentle-eft exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 13:21:10 | demand-backup/25-read-data | + data=100500 logger.go:42: 13:21:10 | demand-backup/25-read-data | + kubectl create configmap -n kuttl-test-gentle-eft 06-read-data-azure-2 --from-literal=data=100500 logger.go:42: 13:21:10 | demand-backup/25-read-data | configmap/06-read-data-azure-2 created logger.go:42: 13:21:11 | demand-backup/25-read-data | test step completed 25-read-data logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | starting test step 26-delete-all-backups logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | running command: [sh -c set -o errexit set -o xtrace source ../../functions kubectl delete ps-backup --all -n "${NAMESPACE}" backup_name_minio="demand-backup-minio" accessKey="$(kubectl -n "${NAMESPACE}" get secret minio-secret -o jsonpath='{.data.AWS_ACCESS_KEY_ID}' | base64 -d)" secretKey="$(kubectl -n "${NAMESPACE}" get secret minio-secret -o jsonpath='{.data.AWS_SECRET_ACCESS_KEY}' | base64 -d)" backup_exists=$( kubectl run -n "${NAMESPACE}" -i --rm aws-cli --image=perconalab/awscli --restart=Never -- \ /usr/bin/env AWS_ACCESS_KEY_ID="${accessKey}" AWS_SECRET_ACCESS_KEY="${secretKey}" AWS_DEFAULT_REGION=us-east-1 \ /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls 'operator-testing/' | grep -c "${backup_name_minio}/" | cat exit "${PIPESTATUS[0]}" ) if [[ 1 -eq $backup_exists ]]; then echo "Backup was not removed from bucket -- minio" exit 1 fi] logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | + source ../../functions logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ realpath ../../.. 
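Step 26-delete-all-backups deletes every ps-backup resource and then verifies the finalizer actually removed the MinIO objects: it lists the test bucket from a one-shot aws-cli pod and fails the step if the demand-backup-minio/ prefix is still present. Reflowed from the command logged above:

    set -o errexit
    set -o xtrace
    source ../../functions

    kubectl delete ps-backup --all -n "${NAMESPACE}"

    backup_name_minio="demand-backup-minio"
    accessKey="$(kubectl -n "${NAMESPACE}" get secret minio-secret -o jsonpath='{.data.AWS_ACCESS_KEY_ID}' | base64 -d)"
    secretKey="$(kubectl -n "${NAMESPACE}" get secret minio-secret -o jsonpath='{.data.AWS_SECRET_ACCESS_KEY}' | base64 -d)"

    backup_exists=$(
        # List the bucket from inside the cluster and count entries matching the backup prefix.
        # Piping through cat keeps a zero count (grep exit 1) from tripping errexit.
        kubectl run -n "${NAMESPACE}" -i --rm aws-cli --image=perconalab/awscli --restart=Never -- \
            /usr/bin/env AWS_ACCESS_KEY_ID="${accessKey}" AWS_SECRET_ACCESS_KEY="${secretKey}" AWS_DEFAULT_REGION=us-east-1 \
            /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls 'operator-testing/' \
            | grep -c "${backup_name_minio}/" | cat
        # Propagate the kubectl run exit status so a failed listing still fails the step.
        exit "${PIPESTATUS[0]}"
    )

    if [[ 1 -eq $backup_exists ]]; then
        echo "Backup was not removed from bucket -- minio"
        exit 1
    fi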
logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | ++++ pwd logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | ++ test_name=demand-backup logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export GIT_BRANCH=PR-767 logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ GIT_BRANCH=PR-767 logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export VERSION=PR-767-8e07c66d logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ VERSION=PR-767-8e07c66d logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export 
IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | ++++ which gdate logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | ++++ which date logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ date=/usr/bin/date logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ command -v oc logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ kubectl get nodes logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | +++ grep '^minikube' logger.go:42: 13:21:11 | demand-backup/26-delete-all-backups | + kubectl delete ps-backup --all -n kuttl-test-gentle-eft logger.go:42: 13:21:12 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-azure" deleted logger.go:42: 13:21:12 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-gcp" deleted logger.go:42: 13:21:12 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-minio" deleted logger.go:42: 13:21:12 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-s3" deleted logger.go:42: 13:21:16 | demand-backup/26-delete-all-backups | + backup_name_minio=demand-backup-minio logger.go:42: 13:21:16 | demand-backup/26-delete-all-backups | ++ kubectl -n kuttl-test-gentle-eft get secret minio-secret -o 'jsonpath={.data.AWS_ACCESS_KEY_ID}' logger.go:42: 13:21:16 | demand-backup/26-delete-all-backups | ++ base64 -d logger.go:42: 13:21:16 | demand-backup/26-delete-all-backups | + accessKey=some-access-key logger.go:42: 13:21:16 | demand-backup/26-delete-all-backups | ++ kubectl -n kuttl-test-gentle-eft get secret minio-secret -o 'jsonpath={.data.AWS_SECRET_ACCESS_KEY}' 
logger.go:42: 13:21:16 | demand-backup/26-delete-all-backups | ++ base64 -d logger.go:42: 13:21:17 | demand-backup/26-delete-all-backups | + secretKey=some-secret-key logger.go:42: 13:21:17 | demand-backup/26-delete-all-backups | ++ kubectl run -n kuttl-test-gentle-eft -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env AWS_ACCESS_KEY_ID=some-access-key AWS_SECRET_ACCESS_KEY=some-secret-key AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls operator-testing/ logger.go:42: 13:21:17 | demand-backup/26-delete-all-backups | ++ grep -c demand-backup-minio/ logger.go:42: 13:21:17 | demand-backup/26-delete-all-backups | ++ cat logger.go:42: 13:21:22 | demand-backup/26-delete-all-backups | ++ exit 0 logger.go:42: 13:21:22 | demand-backup/26-delete-all-backups | + backup_exists=0 logger.go:42: 13:21:22 | demand-backup/26-delete-all-backups | + [[ 1 -eq 0 ]] logger.go:42: 13:21:22 | demand-backup/26-delete-all-backups | test step completed 26-delete-all-backups logger.go:42: 13:21:22 | demand-backup/98-drop-finalizer | starting test step 98-drop-finalizer logger.go:42: 13:21:22 | demand-backup/98-drop-finalizer | PerconaServerMySQL:kuttl-test-gentle-eft/demand-backup updated logger.go:42: 13:21:22 | demand-backup/98-drop-finalizer | test step completed 98-drop-finalizer logger.go:42: 13:21:22 | demand-backup/99-remove-cluster-gracefully | starting test step 99-remove-cluster-gracefully logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | running command: [sh -c set -o errexit set -o xtrace source ../../functions destroy_operator] logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | + source ../../functions logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ realpath ../../.. 
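The final cleanup step only calls destroy_operator from ../../functions. Judging by the trace that follows, the helper force-deletes the operator deployment and, because a dedicated operator namespace is in use, the ps-operator namespace as well; a minimal sketch based on that trace (OPERATOR_NS is an assumed variable name holding "ps-operator" in this run):

    destroy_operator() {
        # Force-delete the operator deployment without waiting for graceful termination.
        kubectl -n "${OPERATOR_NS}" delete deployment percona-server-mysql-operator \
            --force --grace-period=0
        # The operator runs in its own namespace here, so remove that namespace too.
        if [[ -n "${OPERATOR_NS}" ]]; then
            kubectl delete namespace "${OPERATOR_NS}" --force --grace-period=0
        fi
    }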
logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | ++++ pwd logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/tests/demand-backup logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | ++ test_name=demand-backup logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/vars.sh logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-767 logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/deploy logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-767/e2e-tests/conf logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ TEMP_DIR=/tmp/kuttl/ps/demand-backup logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export GIT_BRANCH=PR-767 logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ GIT_BRANCH=PR-767 logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export VERSION=PR-767-8e07c66d logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ VERSION=PR-767-8e07c66d logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-767-8e07c66d logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:21:23 | 
demand-backup/99-remove-cluster-gracefully | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ export CERT_MANAGER_VER=1.15.1 logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ CERT_MANAGER_VER=1.15.1 logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | ++++ which gdate logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-767/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | ++++ which date logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ date=/usr/bin/date logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ command -v oc logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ kubectl get nodes logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | +++ grep '^minikube' logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | + destroy_operator logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | + kubectl -n ps-operator delete deployment percona-server-mysql-operator --force --grace-period=0 logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. 
logger.go:42: 13:21:23 | demand-backup/99-remove-cluster-gracefully | deployment.apps "percona-server-mysql-operator" force deleted logger.go:42: 13:21:24 | demand-backup/99-remove-cluster-gracefully | + [[ -n ps-operator ]] logger.go:42: 13:21:24 | demand-backup/99-remove-cluster-gracefully | + kubectl delete namespace ps-operator --force --grace-period=0 logger.go:42: 13:21:24 | demand-backup/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 13:21:24 | demand-backup/99-remove-cluster-gracefully | namespace "ps-operator" force deleted logger.go:42: 13:21:30 | demand-backup/99-remove-cluster-gracefully | test step completed 99-remove-cluster-gracefully logger.go:42: 13:21:30 | demand-backup | demand-backup events from ns kuttl-test-gentle-eft: logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:37 +0000 UTC Normal Pod mysql-client Scheduled Successfully assigned kuttl-test-gentle-eft/mysql-client to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:37 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Pulling Pulling image "percona/percona-server:8.0.33" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:43 +0000 UTC Normal ReplicaSet.apps minio-service-847fc8bb8d SuccessfulCreate Created pod: minio-service-847fc8bb8d-xn5dz replicaset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:43 +0000 UTC Normal PersistentVolumeClaim minio-service WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:43 +0000 UTC Normal Deployment.apps minio-service ScalingReplicaSet Scaled up replica set minio-service-847fc8bb8d to 1 deployment-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:43 +0000 UTC Normal PersistentVolumeClaim minio-service ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:43 +0000 UTC Normal PersistentVolumeClaim minio-service Provisioning External provisioner is provisioning volume for claim "kuttl-test-gentle-eft/minio-service" pd.csi.storage.gke.io_gke-f8b62841241c40fc85c1-fb59-6e75-vm_4dff289f-8ad7-4e19-868e-0a7dc424f4c9 logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:44 +0000 UTC Normal Pod minio-service-post-job-m4j9j Scheduled Successfully assigned kuttl-test-gentle-eft/minio-service-post-job-m4j9j to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:44 +0000 UTC Normal Job.batch minio-service-post-job SuccessfulCreate Created pod: minio-service-post-job-m4j9j job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:46 +0000 UTC Normal Pod minio-service-post-job-m4j9j.spec.containers{minio-make-user} Pulling Pulling image "quay.io/minio/mc:RELEASE.2023-09-29T16-41-22Z" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:47 +0000 UTC Normal Pod minio-service-847fc8bb8d-xn5dz Scheduled Successfully assigned kuttl-test-gentle-eft/minio-service-847fc8bb8d-xn5dz to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:47 +0000 UTC Normal PersistentVolumeClaim minio-service ProvisioningSucceeded Successfully provisioned volume pvc-8d1e394b-1c2c-4e76-b703-c45e6f6720c3 pd.csi.storage.gke.io_gke-f8b62841241c40fc85c1-fb59-6e75-vm_4dff289f-8ad7-4e19-868e-0a7dc424f4c9 logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:50 +0000 UTC Normal Pod minio-service-post-job-m4j9j.spec.containers{minio-make-user} Pulled Successfully pulled image "quay.io/minio/mc:RELEASE.2023-09-29T16-41-22Z" in 4.773s (4.773s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:50 +0000 UTC Normal Pod minio-service-post-job-m4j9j.spec.containers{minio-make-user} Created Created container minio-make-user kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:50 +0000 UTC Normal Pod minio-service-post-job-m4j9j.spec.containers{minio-make-user} Started Started container minio-make-user kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:55 +0000 UTC Normal Pod minio-service-847fc8bb8d-xn5dz SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8d1e394b-1c2c-4e76-b703-c45e6f6720c3" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:43:57 +0000 UTC Normal Pod minio-service-847fc8bb8d-xn5dz.spec.containers{minio} Pulling Pulling image "quay.io/minio/minio:RELEASE.2023-09-30T07-02-29Z" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:04 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Pulled Successfully pulled image "percona/percona-server:8.0.33" in 26.198s (26.198s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:04 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Created Created container mysql-client kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:04 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Started Started container mysql-client kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:07 +0000 UTC Normal Pod minio-service-847fc8bb8d-xn5dz.spec.containers{minio} Pulled Successfully pulled image "quay.io/minio/minio:RELEASE.2023-09-30T07-02-29Z" in 9.768s (9.768s including waiting) 
kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:07 +0000 UTC Normal Pod minio-service-847fc8bb8d-xn5dz.spec.containers{minio} Created Created container minio kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:07 +0000 UTC Normal Pod minio-service-847fc8bb8d-xn5dz.spec.containers{minio} Started Started container minio kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:20 +0000 UTC Normal Job.batch minio-service-post-job Completed Job completed job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:21 +0000 UTC Normal Pod aws-cli Scheduled Successfully assigned kuttl-test-gentle-eft/aws-cli to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:22 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulling Pulling image "perconalab/awscli" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:25 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulled Successfully pulled image "perconalab/awscli" in 3.592s (3.592s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:25 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Created Created container aws-cli kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:25 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Started Started container aws-cli kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:33 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:33 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:33 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-gentle-eft/datadir-demand-backup-mysql-0" pd.csi.storage.gke.io_gke-f8b62841241c40fc85c1-fb59-6e75-vm_4dff289f-8ad7-4e19-868e-0a7dc424f4c9 logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:33 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Claim datadir-demand-backup-mysql-0 Pod demand-backup-mysql-0 in StatefulSet demand-backup-mysql success statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:33 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Pod demand-backup-mysql-0 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:34 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:34 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulCreate create Pod demand-backup-orc-0 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:35 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:35 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 378ms (378ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:35 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:35 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:37 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 ProvisioningSucceeded Successfully provisioned volume pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7 pd.csi.storage.gke.io_gke-f8b62841241c40fc85c1-fb59-6e75-vm_4dff289f-8ad7-4e19-868e-0a7dc424f4c9 logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:37 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:37 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 2.96s (2.96s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc 
kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 305ms (305ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:40 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:45 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:47 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 5.589s (5.589s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:53 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:44:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:15 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:15 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:15 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulCreate create Pod demand-backup-orc-1 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:21 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 5.536s (5.536s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:21 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:21 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:24 +0000 UTC Normal 
Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 27.64s (27.64s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:25 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:25 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:25 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:25 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:27 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 2.555s (2.555s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:28 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:28 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:28 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:28 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 318ms (318ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:28 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:28 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:35 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 10.267s (10.267s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:35 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:35 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:35 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 12.586s (12.586s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:48 +0000 
UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:58 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:58 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 Provisioning External provisioner is provisioning volume for claim "kuttl-test-gentle-eft/datadir-demand-backup-mysql-1" pd.csi.storage.gke.io_gke-f8b62841241c40fc85c1-fb59-6e75-vm_4dff289f-8ad7-4e19-868e-0a7dc424f4c9 logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:58 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:58 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Claim datadir-demand-backup-mysql-1 Pod demand-backup-mysql-1 in StatefulSet demand-backup-mysql success statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:45:58 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Pod demand-backup-mysql-1 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:00 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:00 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulCreate create Pod demand-backup-orc-2 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:01 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:01 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:01 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-0 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:01 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:01 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 369ms (369ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:01 +0000 
UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:01 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:02 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 ProvisioningSucceeded Successfully provisioned volume pvc-c536f7ed-3f65-48dd-b80f-8b7228a3037d pd.csi.storage.gke.io_gke-f8b62841241c40fc85c1-fb59-6e75-vm_4dff289f-8ad7-4e19-868e-0a7dc424f4c9 logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:02 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 352ms (352ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:02 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:02 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:02 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:04 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:04 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 2.474s (2.474s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 359ms (359ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:07 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:09 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image 
"perconalab/percona-server-mysql-operator:main-haproxy" in 5.557s (5.557s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:09 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 329ms (329ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:10 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:10 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-c536f7ed-3f65-48dd-b80f-8b7228a3037d" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:11 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:11 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-1 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:11 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:11 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 376ms (376ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:12 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 372ms (372ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:12 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:12 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created 
container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:14 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:18 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 4.268s (4.268s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:18 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:18 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:18 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:19 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 319ms (319ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:19 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:19 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:19 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:19 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-2 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 400ms (400ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:25 +0000 UTC Normal 
Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:30 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 4.535s (4.535s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:30 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:30 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:30 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:30 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 188ms (188ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:30 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:30 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:39 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 26.072s (26.072s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:39 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:39 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:39 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 7.964s (7.964s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:46:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:47:01 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 13.281s (13.281s including waiting) kubelet logger.go:42: 13:21:30 | 
demand-backup | 2024-11-05 12:47:01 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:47:01 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:47:14 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2024/11/05 12:47:11 Peers: [3638353462303638.demand-backup-mysql-unready.kuttl-test-gentle-eft 3766386361353931.demand-backup-mysql-unready.kuttl-test-gentle-eft] 2024/11/05 12:47:11 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:47:11 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft] 2024/11/05 12:47:11 lookup demand-backup-mysql-1 [10.240.120.12] 2024/11/05 12:47:11 PodIP: 10.240.120.12 2024/11/05 12:47:11 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft [10.240.121.5] 2024/11/05 12:47:11 PrimaryIP: 10.240.121.5 2024/11/05 12:47:11 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:47:11 Opening connection to 10.240.120.12 2024/11/05 12:47:11 Clone required: true 2024/11/05 12:47:11 Checking if a clone in progress 2024/11/05 12:47:11 Clone in progress: false 2024/11/05 12:47:11 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:47:14 Clone finished. Restarting container... kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:47:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:47:18 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 109ms (109ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:47:51 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:47:51 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:47:51 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 Provisioning External provisioner is provisioning volume for claim "kuttl-test-gentle-eft/datadir-demand-backup-mysql-2" pd.csi.storage.gke.io_gke-f8b62841241c40fc85c1-fb59-6e75-vm_4dff289f-8ad7-4e19-868e-0a7dc424f4c9 logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:47:51 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Claim datadir-demand-backup-mysql-2 Pod demand-backup-mysql-2 in StatefulSet demand-backup-mysql success statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:47:51 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Pod demand-backup-mysql-2 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:47:55 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 ProvisioningSucceeded Successfully provisioned volume pvc-36f29a65-04b0-44b7-9fa7-c72753bb16c8 pd.csi.storage.gke.io_gke-f8b62841241c40fc85c1-fb59-6e75-vm_4dff289f-8ad7-4e19-868e-0a7dc424f4c9 logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:47:55 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:03 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-36f29a65-04b0-44b7-9fa7-c72753bb16c8" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:04 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:04 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 201ms (201ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:04 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:04 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:06 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:31 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 24.352s (24.352s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:31 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:31 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:31 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image 
"perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:40 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 8.919s (8.919s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:40 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:40 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:40 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:49 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 9.649s (9.649s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:49 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:49 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:57 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2024/11/05 12:48:54 Waiting for bootstrap.lock to be deleted 2024/11/05 12:48:57 Peers: [3236633631626132.demand-backup-mysql-unready.kuttl-test-gentle-eft 3638353462303638.demand-backup-mysql-unready.kuttl-test-gentle-eft 3766386361353931.demand-backup-mysql-unready.kuttl-test-gentle-eft] 2024/11/05 12:48:57 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:48:57 bootstrap finished in 0.004682 seconds 2024/11/05 12:48:57 bootstrap failed: select donor: connect to 3236633631626132.demand-backup-mysql-unready.kuttl-test-gentle-eft: ping DB: dial tcp 10.240.122.12:33062: connect: connection refused kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:48:57 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:49:05 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 122ms (122ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:49:25 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2024/11/05 12:49:24 Peers: [3236633631626132.demand-backup-mysql-unready.kuttl-test-gentle-eft 3638353462303638.demand-backup-mysql-unready.kuttl-test-gentle-eft 3766386361353931.demand-backup-mysql-unready.kuttl-test-gentle-eft] 2024/11/05 12:49:24 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:49:24 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft] 2024/11/05 12:49:24 lookup 
demand-backup-mysql-2 [10.240.122.12] 2024/11/05 12:49:24 PodIP: 10.240.122.12 2024/11/05 12:49:24 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft [10.240.121.5] 2024/11/05 12:49:24 PrimaryIP: 10.240.121.5 2024/11/05 12:49:24 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:49:24 Opening connection to 10.240.122.12 2024/11/05 12:49:24 Clone required: true 2024/11/05 12:49:24 Checking if a clone in progress 2024/11/05 12:49:24 Clone in progress: false 2024/11/05 12:49:24 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:49:25 Clone finished. Restarting container... kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:08 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:09 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:09 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:12 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2024/11/05 12:50:12 readiness check failed: connect to db: ping DB: dial tcp 10.240.121.5:33062: connect: connection refused kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:17 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2024/11/05 12:50:17 readiness check failed: connect to db: ping DB: dial tcp 10.240.121.5:33062: connect: connection refused kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:22 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:29 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:30 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 169ms (169ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:32 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:32 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image 
"perconalab/percona-server-mysql-operator:main-psmysql" in 122ms (122ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:32 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:32 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:32 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:32 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 114ms (114ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:32 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:32 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:32 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:32 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 117ms (117ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:32 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:33 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:51 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Startup probe failed: 2024/11/05 12:50:50 Peers: [3236633631626132.demand-backup-mysql-unready.kuttl-test-gentle-eft 3638353462303638.demand-backup-mysql-unready.kuttl-test-gentle-eft 6232343637343163.demand-backup-mysql-unready.kuttl-test-gentle-eft] 2024/11/05 12:50:50 FQDN: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:50:50 Primary: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft Replicas: [demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft] 2024/11/05 12:50:50 lookup demand-backup-mysql-0 [10.240.121.8] 2024/11/05 12:50:50 PodIP: 10.240.121.8 2024/11/05 12:50:50 lookup demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft [10.240.120.12] 2024/11/05 12:50:50 PrimaryIP: 10.240.120.12 2024/11/05 12:50:50 Donor: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:50:50 Opening connection to 10.240.121.8 2024/11/05 12:50:50 Clone required: true 2024/11/05 12:50:50 Checking if a clone in progress 2024/11/05 12:50:50 Clone in progress: false 2024/11/05 12:50:50 Cloning from demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:50:51 Clone finished. Restarting container... 
kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:51 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:50:55 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 307ms (307ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:51:42 +0000 UTC Normal Pod xb-demand-backup-minio-minio-6mfj8 Scheduled Successfully assigned kuttl-test-gentle-eft/xb-demand-backup-minio-minio-6mfj8 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:51:42 +0000 UTC Normal Pod xb-demand-backup-minio-minio-6mfj8.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:51:42 +0000 UTC Normal Job.batch xb-demand-backup-minio-minio SuccessfulCreate Created pod: xb-demand-backup-minio-minio-6mfj8 job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:51:43 +0000 UTC Normal Pod xb-demand-backup-minio-minio-6mfj8.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 383ms (383ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:51:43 +0000 UTC Normal Pod xb-demand-backup-minio-minio-6mfj8.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:51:43 +0000 UTC Normal Pod xb-demand-backup-minio-minio-6mfj8.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:51:45 +0000 UTC Normal Pod xb-demand-backup-minio-minio-6mfj8.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:51:45 +0000 UTC Normal Pod xb-demand-backup-minio-minio-6mfj8.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 351ms (351ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:51:45 +0000 UTC Normal Pod xb-demand-backup-minio-minio-6mfj8.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:51:45 +0000 UTC Normal Pod xb-demand-backup-minio-minio-6mfj8.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:51:48 +0000 UTC Warning PerconaServerMySQL.ps.percona.com demand-backup AsyncReplicationNotReady demand-backup-mysql-0: [not_replicating] ps-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:51:52 +0000 UTC Normal Job.batch xb-demand-backup-minio-minio Completed Job completed job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:34 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulDelete delete Pod demand-backup-mysql-2 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:34 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | 
demand-backup | 2024-11-05 12:52:34 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:34 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulDelete delete Pod demand-backup-orc-2 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:35 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:35 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:35 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:35 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:35 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulDelete delete Pod demand-backup-haproxy-2 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:35 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulDelete delete Pod demand-backup-haproxy-1 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:36 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:36 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:36 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulDelete delete Pod demand-backup-haproxy-0 in StatefulSet demand-backup-haproxy successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:38 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:38 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:38 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:38 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulDelete delete Pod demand-backup-mysql-1 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:41 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Readiness probe failed: 2024/11/05 12:52:41 readiness check failed: connect to db: ping DB: dial tcp 10.240.120.12:33062: connect: connection refused kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:58 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:58 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping 
container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:58 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulDelete delete Pod demand-backup-mysql-0 in StatefulSet demand-backup-mysql successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:52:59 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:53:05 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:53:05 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:53:05 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulDelete delete Pod demand-backup-orc-1 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:53:37 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:53:37 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:53:37 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulDelete delete Pod demand-backup-orc-0 in StatefulSet demand-backup-orc successful statefulset-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:10 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-99tx7 Scheduled Successfully assigned kuttl-test-gentle-eft/xb-restore-demand-backup-restore-minio-99tx7 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:10 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio SuccessfulCreate Created pod: xb-restore-demand-backup-restore-minio-99tx7 job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:18 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-99tx7 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:19 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-99tx7.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:19 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-99tx7.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 359ms (359ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:19 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-99tx7.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:19 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-99tx7.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:21 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-99tx7.spec.containers{xtrabackup} Pulling Pulling image 
"perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:22 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-99tx7.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 313ms (313ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:22 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-99tx7.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:22 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-99tx7.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:32 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:32 +0000 UTC Warning Pod demand-backup-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:32 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:32 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:32 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio Completed Job completed job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 357ms (357ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:34 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:35 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 314ms (314ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:35 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:35 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:35 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image 
"perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:35 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 316ms (316ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:35 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:35 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:53 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:54 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:54 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 356ms (356ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:54 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:54 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:56 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:56 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 331ms (331ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:56 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:56 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:56 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:56 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 313ms (313ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:56 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:56 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:56 +0000 UTC Normal Pod 
demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 302ms (302ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:54:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:07 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:08 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:08 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 369ms (369ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:08 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:08 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:10 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:10 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 408ms (408ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:10 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:10 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:10 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:11 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 317ms (317ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:11 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:11 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:29 
+0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:34 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:34 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:35 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 364ms (364ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:35 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:35 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:36 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-c536f7ed-3f65-48dd-b80f-8b7228a3037d" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:37 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:37 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 317ms (317ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:37 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:37 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:37 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:37 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 335ms (335ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:37 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:37 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:38 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 
12:55:38 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:38 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:38 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 380ms (380ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:38 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:38 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:39 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 388ms (388ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:39 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:39 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 322ms (322ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:40 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:41 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:41 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 337ms (337ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:41 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:41 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 
13:21:30 | demand-backup | 2024-11-05 12:55:41 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:41 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 333ms (333ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:41 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 305ms (305ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:41 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:41 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:41 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:41 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 349ms (349ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:41 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:41 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:42 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:42 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:42 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:42 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:43 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 350ms (350ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:43 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:43 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:43 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled 
Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:43 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:44 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 386ms (386ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:44 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:44 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:45 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:45 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 304ms (304ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:45 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:45 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:45 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:46 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 327ms (327ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:46 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:46 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:46 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:46 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 311ms (311ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:46 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:46 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:46 +0000 UTC Normal Pod 
demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:46 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 301ms (301ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:46 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:47 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:59 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2024/11/05 12:55:58 Peers: [6430633435666466.demand-backup-mysql-unready.kuttl-test-gentle-eft 6635666531663335.demand-backup-mysql-unready.kuttl-test-gentle-eft] 2024/11/05 12:55:58 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:55:58 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft] 2024/11/05 12:55:58 lookup demand-backup-mysql-1 [10.240.120.15] 2024/11/05 12:55:58 PodIP: 10.240.120.15 2024/11/05 12:55:58 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft [10.240.121.9] 2024/11/05 12:55:58 PrimaryIP: 10.240.121.9 2024/11/05 12:55:58 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:55:58 Opening connection to 10.240.120.15 2024/11/05 12:55:58 Clone required: true 2024/11/05 12:55:58 Checking if a clone in progress 2024/11/05 12:55:58 Clone in progress: false 2024/11/05 12:55:58 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:55:59 Clone finished. Restarting container... 
kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:55:59 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:02 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 340ms (340ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:33 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:40 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-36f29a65-04b0-44b7-9fa7-c72753bb16c8" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:42 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:42 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 393ms (393ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:42 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:42 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:44 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:44 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 322ms (322ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:44 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:44 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:44 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 329ms (329ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:45 +0000 UTC Normal Pod 
demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 369ms (369ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:56:45 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:57:03 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2024/11/05 12:57:02 Peers: [3663626630373037.demand-backup-mysql-unready.kuttl-test-gentle-eft 6430633435666466.demand-backup-mysql-unready.kuttl-test-gentle-eft 6635666531663335.demand-backup-mysql-unready.kuttl-test-gentle-eft] 2024/11/05 12:57:02 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:57:02 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft] 2024/11/05 12:57:02 lookup demand-backup-mysql-2 [10.240.122.17] 2024/11/05 12:57:02 PodIP: 10.240.122.17 2024/11/05 12:57:02 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft [10.240.121.9] 2024/11/05 12:57:02 PrimaryIP: 10.240.121.9 2024/11/05 12:57:02 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:57:02 Opening connection to 10.240.122.17 2024/11/05 12:57:02 Clone required: true 2024/11/05 12:57:02 Checking if a clone in progress 2024/11/05 12:57:02 Clone in progress: false 2024/11/05 12:57:02 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 12:57:03 Clone finished. Restarting container... 
kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:57:03 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:57:06 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 167ms (167ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:26 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:27 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:27 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:27 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:27 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:28 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:28 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:29 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:29 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:30 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:30 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:30 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:36 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:36 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:36 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:39 +0000 UTC Warning Pod 
demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2024/11/05 12:58:39 readiness check failed: connect to db: ping DB: dial tcp 10.240.121.9:33062: connect: connection refused kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:44 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:57 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:58:57 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:59:29 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:59:29 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 12:59:31 +0000 UTC Warning Pod demand-backup-orc-1.spec.containers{orc} Unhealthy Readiness probe failed: Get "http://10.240.120.14:3000/api/health": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:02 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-tv52v Scheduled Successfully assigned kuttl-test-gentle-eft/xb-restore-demand-backup-restore-minio-backup-source-tv52v to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:02 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio-backup-source SuccessfulCreate Created pod: xb-restore-demand-backup-restore-minio-backup-source-tv52v job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:10 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-tv52v SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:11 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-tv52v.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:11 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-tv52v.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 365ms (365ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:11 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-tv52v.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:11 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-tv52v.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:12 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-tv52v.spec.containers{xtrabackup} Pulling Pulling image 
"perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:13 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-tv52v.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 120ms (120ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:13 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-tv52v.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:13 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-tv52v.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:23 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:23 +0000 UTC Warning Pod demand-backup-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:23 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:23 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:23 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio-backup-source Completed Job completed job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:24 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 166ms (166ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:24 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:24 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:26 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:26 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 125ms (125ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:26 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:26 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:26 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling 
Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:26 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 149ms (149ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:26 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:26 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:40 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 386ms (386ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 141ms (141ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 116ms (116ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:44 +0000 UTC Normal Pod 
demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 111ms (111ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:58 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:59 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:59 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 167ms (167ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:59 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:00:59 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:01 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:01 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 116ms (116ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:01 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:01 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:01 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:01 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 265ms (265ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:01 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:02 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:16 
+0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:18 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:18 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:18 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 363ms (363ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:18 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:19 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:21 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:21 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 330ms (330ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:21 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:21 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:21 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:21 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 108ms (108ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:21 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:21 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:22 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:23 
+0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 369ms (369ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:24 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-c536f7ed-3f65-48dd-b80f-8b7228a3037d" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:25 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:25 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 334ms (334ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:25 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:25 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:25 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 336ms (336ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:26 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:26 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:27 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 1.399s (1.399s including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:27 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:27 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:27 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-2 to 
gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 178ms (178ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:27 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:27 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 117ms (117ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:27 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:27 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:27 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:28 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 316ms (316ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 308ms (308ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:29 +0000 UTC Normal Pod 
demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:29 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 302ms (302ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:29 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:29 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:29 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:30 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 123ms (124ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:30 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:30 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:34 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:34 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:35 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 931ms (931ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:35 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:35 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:37 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:37 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 308ms (308ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:37 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:37 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:37 +0000 UTC Normal 
Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:37 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 307ms (308ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:37 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:38 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:46 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2024/11/05 13:01:45 Peers: [3332366431663733.demand-backup-mysql-unready.kuttl-test-gentle-eft 6665373564633332.demand-backup-mysql-unready.kuttl-test-gentle-eft] 2024/11/05 13:01:45 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:01:45 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft] 2024/11/05 13:01:45 lookup demand-backup-mysql-1 [10.240.120.18] 2024/11/05 13:01:45 PodIP: 10.240.120.18 2024/11/05 13:01:45 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft [10.240.121.12] 2024/11/05 13:01:45 PrimaryIP: 10.240.121.12 2024/11/05 13:01:45 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:01:45 Opening connection to 10.240.120.18 2024/11/05 13:01:45 Clone required: true 2024/11/05 13:01:45 Checking if a clone in progress 2024/11/05 13:01:45 Clone in progress: false 2024/11/05 13:01:45 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:01:46 Clone finished. Restarting container... 
kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:46 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:01:51 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 893ms (893ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:25 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:33 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-36f29a65-04b0-44b7-9fa7-c72753bb16c8" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:34 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:35 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 919ms (919ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:35 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:35 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:37 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:37 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 349ms (349ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:37 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:37 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:37 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 322ms (322ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:38 +0000 UTC Normal Pod 
demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 316ms (316ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:55 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2024/11/05 13:02:54 Peers: [3332366431663733.demand-backup-mysql-unready.kuttl-test-gentle-eft 3365613435316134.demand-backup-mysql-unready.kuttl-test-gentle-eft 6665373564633332.demand-backup-mysql-unready.kuttl-test-gentle-eft] 2024/11/05 13:02:54 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:02:54 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft] 2024/11/05 13:02:54 lookup demand-backup-mysql-2 [10.240.122.21] 2024/11/05 13:02:54 PodIP: 10.240.122.21 2024/11/05 13:02:54 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft [10.240.121.12] 2024/11/05 13:02:54 PrimaryIP: 10.240.121.12 2024/11/05 13:02:54 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:02:54 Opening connection to 10.240.122.21 2024/11/05 13:02:54 Clone required: true 2024/11/05 13:02:54 Checking if a clone in progress 2024/11/05 13:02:54 Clone in progress: false 2024/11/05 13:02:54 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:02:55 Clone finished. Restarting container... 
kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:02:55 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:03:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 915ms (915ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:03:47 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-hptt9 Scheduled Successfully assigned kuttl-test-gentle-eft/xb-demand-backup-s3-aws-s3-hptt9 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:03:47 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-hptt9.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:03:47 +0000 UTC Normal Job.batch xb-demand-backup-s3-aws-s3 SuccessfulCreate Created pod: xb-demand-backup-s3-aws-s3-hptt9 job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:03:48 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-hptt9.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 476ms (476ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:03:48 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-hptt9.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:03:48 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-hptt9.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:03:49 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-hptt9.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:03:49 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-hptt9.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 316ms (316ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:03:50 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-hptt9.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:03:50 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-hptt9.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:03:58 +0000 UTC Normal Job.batch xb-demand-backup-s3-aws-s3 Completed Job completed job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:16 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:16 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:16 +0000 UTC Normal Pod 
demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:16 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:16 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:17 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:17 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:17 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:17 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:20 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:20 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:20 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:25 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:25 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:25 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:26 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2024/11/05 13:04:26 readiness check failed: connect to db: ping DB: dial tcp 10.240.121.12:33062: connect: connection refused kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:31 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:46 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:04:46 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:05:17 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:05:17 +0000 UTC Normal Pod 
demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:05:53 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-kl9b6 Scheduled Successfully assigned kuttl-test-gentle-eft/xb-restore-demand-backup-restore-s3-kl9b6 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:05:53 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-s3 SuccessfulCreate Created pod: xb-restore-demand-backup-restore-s3-kl9b6 job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:00 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-kl9b6 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:03 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-kl9b6.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:03 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-kl9b6.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 380ms (380ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:03 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-kl9b6.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:03 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-kl9b6.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:05 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-kl9b6.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:05 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-kl9b6.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 385ms (385ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:05 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-kl9b6.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:05 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-kl9b6.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:19 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:19 +0000 UTC Warning Pod demand-backup-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:19 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler 
logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:19 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-s3 Completed Job completed job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:20 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:20 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 364ms (364ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:20 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:20 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:22 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:22 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 316ms (316ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:22 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:22 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:22 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:22 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 324ms (324ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:22 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:23 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:34 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:35 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:36 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 351ms (351ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:36 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init kubelet 
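(Editorial aside, not part of the harness output: at this point the S3 restore has finished, as shown by the Job.batch xb-restore-demand-backup-restore-s3 Completed event above, and the operator is re-creating the mysql, orchestrator, and haproxy pods. The same sequence can be followed directly with kubectl against the generated test namespace; the commands below are an illustrative sketch, and the namespace name is the one created by this particular run.)

    # Illustrative only: inspect the restore Job and watch the cluster being rebuilt
    kubectl -n kuttl-test-gentle-eft get job xb-restore-demand-backup-restore-s3
    kubectl -n kuttl-test-gentle-eft get pods -w
    # Replay the namespace events in chronological order, like the listing in this log
    kubectl -n kuttl-test-gentle-eft get events --sort-by=.lastTimestamp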
logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:36 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:38 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:38 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 303ms (303ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:38 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:38 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:38 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:38 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 316ms (316ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 298ms (298ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:54 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:55 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:55 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 381ms (381ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:55 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created 
container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:55 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:57 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:57 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 314ms (314ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:57 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:57 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:57 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:58 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 325ms (325ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:58 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:06:58 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:10 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:12 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:12 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:13 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 388ms (388ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:13 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:13 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:14 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:14 +0000 UTC Normal Pod 
demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 336ms (336ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:14 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:14 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:14 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:15 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 274ms (274ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:15 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:15 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:15 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:15 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:16 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 361ms (361ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:16 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:16 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:18 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:18 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 319ms (319ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:18 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:18 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:18 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-c536f7ed-3f65-48dd-b80f-8b7228a3037d" 
attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:19 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:19 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 375ms (375ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:19 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:19 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:19 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:19 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 351ms (351ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:20 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:20 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 367ms (367ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:20 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:20 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:22 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:22 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 345ms (345ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:22 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | 
demand-backup | 2024-11-05 13:07:22 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:22 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 345ms (345ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:22 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:23 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:23 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:23 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 335ms (335ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:23 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:23 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 341ms (341ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 293ms (293ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:30 +0000 
UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:30 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:31 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 329ms (329ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:31 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:31 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:33 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:33 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 340ms (340ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:33 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:33 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:33 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:34 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 295ms (295ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:34 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:34 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:41 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2024/11/05 13:07:39 Peers: [6165313237613139.demand-backup-mysql-unready.kuttl-test-gentle-eft 6463613166633434.demand-backup-mysql-unready.kuttl-test-gentle-eft] 2024/11/05 13:07:39 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:07:39 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft] 2024/11/05 13:07:39 lookup demand-backup-mysql-1 [10.240.120.22] 2024/11/05 13:07:39 PodIP: 10.240.120.22 2024/11/05 13:07:39 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft [10.240.121.15] 2024/11/05 13:07:39 PrimaryIP: 10.240.121.15 2024/11/05 13:07:39 Donor: 
demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:07:39 Opening connection to 10.240.120.22 2024/11/05 13:07:40 Clone required: true 2024/11/05 13:07:40 Checking if a clone in progress 2024/11/05 13:07:40 Clone in progress: false 2024/11/05 13:07:40 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:07:41 Clone finished. Restarting container... kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:07:41 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:15 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:22 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-36f29a65-04b0-44b7-9fa7-c72753bb16c8" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:26 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:26 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 344ms (344ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:26 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:26 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:28 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:28 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 318ms (318ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:29 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:29 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:29 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:29 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 298ms (298ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:29 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:29 +0000 UTC Normal Pod 
demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:29 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:29 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 306ms (306ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:29 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:29 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:47 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2024/11/05 13:08:46 Peers: [3661323062636433.demand-backup-mysql-unready.kuttl-test-gentle-eft 6165313237613139.demand-backup-mysql-unready.kuttl-test-gentle-eft 6463613166633434.demand-backup-mysql-unready.kuttl-test-gentle-eft] 2024/11/05 13:08:46 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:08:46 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft] 2024/11/05 13:08:46 lookup demand-backup-mysql-2 [10.240.122.26] 2024/11/05 13:08:46 PodIP: 10.240.122.26 2024/11/05 13:08:46 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft [10.240.121.15] 2024/11/05 13:08:46 PrimaryIP: 10.240.121.15 2024/11/05 13:08:46 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:08:46 Opening connection to 10.240.122.26 2024/11/05 13:08:46 Clone required: true 2024/11/05 13:08:46 Checking if a clone in progress 2024/11/05 13:08:46 Clone in progress: false 2024/11/05 13:08:46 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:08:47 Clone finished. Restarting container... 
kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:08:47 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:09:37 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-75xkv Scheduled Successfully assigned kuttl-test-gentle-eft/xb-demand-backup-gcp-gcp-cs-75xkv to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:09:37 +0000 UTC Normal Job.batch xb-demand-backup-gcp-gcp-cs SuccessfulCreate Created pod: xb-demand-backup-gcp-gcp-cs-75xkv job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:09:38 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-75xkv.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:09:38 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-75xkv.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 351ms (351ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:09:38 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-75xkv.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:09:38 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-75xkv.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:09:39 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-75xkv.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:09:40 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-75xkv.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 302ms (302ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:09:40 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-75xkv.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:09:40 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-75xkv.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:09:42 +0000 UTC Warning PerconaServerMySQL.ps.percona.com demand-backup AsyncReplicationNotReady demand-backup-mysql-1: [not_replicating] ps-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:09:47 +0000 UTC Normal Job.batch xb-demand-backup-gcp-gcp-cs Completed Job completed job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:00 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet 
logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:00 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:01 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:01 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:02 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:02 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:05 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:05 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:05 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:10 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2024/11/05 13:10:10 readiness check failed: connect to db: ping DB: dial tcp 10.240.121.15:33062: connect: connection refused kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:15 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:32 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:10:32 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:11:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:11:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet 
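The events above show the cluster being scaled down after the GCP backup job completed: the mysql, xtrabackup, pt-heartbeat, orchestrator and haproxy containers are stopped in turn, and the mysql-0 readiness probe fails first with "connection refused" and then with CONTAINER_EXITED while the pod shuts down. The harness prints these as namespace events; the following is only a minimal sketch of how the same events and the backup job could be inspected by hand with standard kubectl (namespace and job name taken from this run, nothing here is part of the test itself):

  # Namespace created for this run (see "Creating namespace" at the start of the test).
  NS=kuttl-test-gentle-eft

  # Namespace events in time order - roughly the stream reproduced in this log.
  kubectl -n "$NS" get events --sort-by=.lastTimestamp

  # Status and logs of the on-demand backup job that completed above.
  kubectl -n "$NS" get job xb-demand-backup-gcp-gcp-cs
  kubectl -n "$NS" logs job/xb-demand-backup-gcp-gcp-cs

  # Per-pod view of the probe failures and container restarts during the scale-down.
  kubectl -n "$NS" describe pod demand-backup-mysql-0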
logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:11:38 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-2t89v Scheduled Successfully assigned kuttl-test-gentle-eft/xb-restore-demand-backup-restore-gcp-2t89v to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:11:38 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-gcp SuccessfulCreate Created pod: xb-restore-demand-backup-restore-gcp-2t89v job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:11:45 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-2t89v SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:11:47 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-2t89v.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:11:47 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-2t89v.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 416ms (416ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:11:47 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-2t89v.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:11:47 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-2t89v.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:11:49 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-2t89v.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:11:49 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-2t89v.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 396ms (396ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:11:49 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-2t89v.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:11:49 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-2t89v.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:02 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:02 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-gcp Completed Job completed job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:03 +0000 UTC Warning Pod demand-backup-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:03 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-0 to 
gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:03 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:04 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 341ms (341ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:04 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:04 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:05 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:05 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 330ms (330ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:05 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:05 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:05 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:06 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 310ms (310ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:06 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:06 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:24 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:25 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:25 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 369ms (369ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:25 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:26 +0000 UTC Normal Pod 
demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:27 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:27 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 309ms (309ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:27 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:27 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:27 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:27 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 320ms (320ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 305ms (305ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:38 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:39 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:39 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 370ms (370ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:39 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 
13:12:39 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:41 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:41 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 358ms (358ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:41 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:42 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:42 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:42 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 316ms (316ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:42 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:12:42 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:00 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:00 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:01 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:01 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 350ms (350ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:01 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:01 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:03 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:03 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image 
"perconalab/percona-server-mysql-operator:main-haproxy" in 302ms (302ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:03 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:04 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:04 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:04 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 317ms (317ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:04 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:04 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:04 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:05 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:05 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 358ms (358ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:05 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:05 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:07 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:07 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 294ms (294ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:07 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:07 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:07 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:08 +0000 UTC Normal 
Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 328ms (328ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:08 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:08 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:08 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:08 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:08 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-c536f7ed-3f65-48dd-b80f-8b7228a3037d" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:09 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 396ms (396ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:09 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:09 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:09 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:09 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 380ms (380ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:09 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:10 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:11 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:11 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 318ms (318ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:11 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:12 +0000 UTC Normal Pod 
demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:12 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:12 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:12 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 364ms (364ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:12 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:12 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 374ms (374ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 393ms (393ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 347ms (347ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:13 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:14 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned 
kuttl-test-gentle-eft/demand-backup-orc-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:14 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:14 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 367ms (367ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:14 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:14 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:16 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:16 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 369ms (369ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:16 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:16 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:16 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:17 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 387ms (387ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:17 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:17 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:30 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2024/11/05 13:13:29 Peers: [3833343166386539.demand-backup-mysql-unready.kuttl-test-gentle-eft 39333264633565.demand-backup-mysql-unready.kuttl-test-gentle-eft] 2024/11/05 13:13:29 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:13:29 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft] 2024/11/05 13:13:29 lookup demand-backup-mysql-1 [10.240.120.25] 2024/11/05 13:13:29 PodIP: 10.240.120.25 2024/11/05 13:13:29 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft [10.240.121.18] 2024/11/05 13:13:29 PrimaryIP: 10.240.121.18 2024/11/05 13:13:29 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:13:29 Opening 
connection to 10.240.120.25 2024/11/05 13:13:29 Clone required: true 2024/11/05 13:13:29 Checking if a clone in progress 2024/11/05 13:13:29 Clone in progress: false 2024/11/05 13:13:29 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:13:30 Clone finished. Restarting container... kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:30 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:13:33 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 317ms (317ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:04 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:12 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-36f29a65-04b0-44b7-9fa7-c72753bb16c8" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:13 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:13 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 367ms (367ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:13 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:13 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:15 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 325ms (325ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 307ms (307ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} 
Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:16 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:17 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 311ms (311ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:17 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:17 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:34 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2024/11/05 13:14:33 Peers: [3833343166386539.demand-backup-mysql-unready.kuttl-test-gentle-eft 39333264633565.demand-backup-mysql-unready.kuttl-test-gentle-eft 6135633336663433.demand-backup-mysql-unready.kuttl-test-gentle-eft] 2024/11/05 13:14:33 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:14:33 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft] 2024/11/05 13:14:33 lookup demand-backup-mysql-2 [10.240.122.31] 2024/11/05 13:14:33 PodIP: 10.240.122.31 2024/11/05 13:14:33 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft [10.240.121.18] 2024/11/05 13:14:33 PrimaryIP: 10.240.121.18 2024/11/05 13:14:33 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:14:33 Opening connection to 10.240.122.31 2024/11/05 13:14:33 Clone required: true 2024/11/05 13:14:33 Checking if a clone in progress 2024/11/05 13:14:33 Clone in progress: false 2024/11/05 13:14:33 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:14:34 Clone finished. Restarting container... 
kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:34 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:14:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 295ms (295ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:19 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-547h4 Scheduled Successfully assigned kuttl-test-gentle-eft/xb-demand-backup-azure-azure-blob-547h4 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:19 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-547h4.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:19 +0000 UTC Normal Job.batch xb-demand-backup-azure-azure-blob SuccessfulCreate Created pod: xb-demand-backup-azure-azure-blob-547h4 job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:20 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-547h4.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 365ms (365ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:20 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-547h4.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:20 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-547h4.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:22 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-547h4.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:22 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-547h4.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 314ms (314ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:22 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-547h4.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:22 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-547h4.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:28 +0000 UTC Normal Job.batch xb-demand-backup-azure-azure-blob Completed Job completed job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:42 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:42 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:42 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | 
demand-backup | 2024-11-05 13:15:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:43 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:43 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:44 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:44 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:15:55 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2024/11/05 13:15:55 readiness check failed: connect to db: ping DB: dial tcp 10.240.121.18:33062: connect: connection refused kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:16:00 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:16:14 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:16:14 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:16:44 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 
2024-11-05 13:16:44 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:16 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-n6f4d Scheduled Successfully assigned kuttl-test-gentle-eft/xb-restore-demand-backup-restore-azure-n6f4d to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:16 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-azure SuccessfulCreate Created pod: xb-restore-demand-backup-restore-azure-n6f4d job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:23 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-n6f4d SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:27 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-n6f4d.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:28 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-n6f4d.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 362ms (362ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:28 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-n6f4d.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:28 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-n6f4d.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:29 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-n6f4d.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:29 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-n6f4d.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 318ms (318ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:29 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-n6f4d.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:29 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-n6f4d.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:44 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:44 +0000 UTC Warning Pod demand-backup-mysql-0 FailedAttachVolume Multi-Attach error for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:44 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-0 to 
gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:44 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-azure Completed Job completed job-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 381ms (381ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:47 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:47 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 324ms (324ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:47 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:48 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:48 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:48 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 332ms (332ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:48 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:17:48 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:02 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8c0b354b-66ae-4309-adf6-7847bfeb96c7" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:03 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:03 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 379ms (379ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:03 +0000 UTC Normal Pod 
demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:03 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:05 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:05 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 299ms (299ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:05 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:05 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:05 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:05 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 310ms (310ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:05 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 327ms (327ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:20 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:20 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:21 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 381ms (381ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 
2024-11-05 13:18:21 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:21 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:22 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:22 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 331ms (331ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:22 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 336ms (336ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:38 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:41 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-0 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:41 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:42 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 372ms (372ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:42 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:42 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:43 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet 
logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 313ms (313ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 315ms (315ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:44 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:44 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-1 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:45 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:45 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 359ms (359ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:45 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:45 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:45 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-c536f7ed-3f65-48dd-b80f-8b7228a3037d" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:47 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:47 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 421ms (421ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:47 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:47 +0000 UTC 
Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:47 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:48 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 408ms (408ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:48 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:48 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:49 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-haproxy-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-tk71 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:49 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:50 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:50 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 367ms (367ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:50 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:50 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:50 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 375ms (375ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:50 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:50 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:52 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:52 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:53 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image 
"perconalab/percona-server-mysql-operator:main-haproxy" in 345ms (345ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:53 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:53 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:53 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:53 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 330ms (330ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:53 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:53 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 380ms (380ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 381ms (381ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:54 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 288ms (288ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:54 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:54 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container 
pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:55 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-orc-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-xtd0 default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 312ms (312ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:58 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:58 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 294ms (294ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:58 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:58 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:58 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:59 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 310ms (310ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:59 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:18:59 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:19:10 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: 2024/11/05 13:19:09 Peers: [6634333064393464.demand-backup-mysql-unready.kuttl-test-gentle-eft 6637626365636363.demand-backup-mysql-unready.kuttl-test-gentle-eft] 2024/11/05 13:19:09 FQDN: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:19:09 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft] 2024/11/05 13:19:09 lookup demand-backup-mysql-1 [10.240.120.27] 2024/11/05 13:19:09 PodIP: 10.240.120.27 2024/11/05 13:19:09 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft [10.240.121.21] 2024/11/05 
13:19:09 PrimaryIP: 10.240.121.21 2024/11/05 13:19:09 Donor: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:19:09 Opening connection to 10.240.120.27 2024/11/05 13:19:09 Clone required: true 2024/11/05 13:19:09 Checking if a clone in progress 2024/11/05 13:19:09 Clone in progress: false 2024/11/05 13:19:09 Cloning from demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:19:10 Clone finished. Restarting container... kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:19:10 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:19:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 322ms (322ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:19:49 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-gentle-eft/demand-backup-mysql-2 to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:00 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-36f29a65-04b0-44b7-9fa7-c72753bb16c8" attachdetach-controller logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:01 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:01 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-767-8e07c66d" in 361ms (361ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:01 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:01 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:03 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:03 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 311ms (311ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:03 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:03 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:03 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:03 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 318ms (318ms 
including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:03 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:03 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:03 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:04 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 317ms (317ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:04 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:04 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:22 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: 2024/11/05 13:20:21 Peers: [3661383463303134.demand-backup-mysql-unready.kuttl-test-gentle-eft 6634333064393464.demand-backup-mysql-unready.kuttl-test-gentle-eft 6637626365636363.demand-backup-mysql-unready.kuttl-test-gentle-eft] 2024/11/05 13:20:21 FQDN: demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:20:21 Primary: demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft Replicas: [demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft demand-backup-mysql-2.demand-backup-mysql.kuttl-test-gentle-eft] 2024/11/05 13:20:21 lookup demand-backup-mysql-2 [10.240.122.36] 2024/11/05 13:20:21 PodIP: 10.240.122.36 2024/11/05 13:20:21 lookup demand-backup-mysql-0.demand-backup-mysql.kuttl-test-gentle-eft [10.240.121.21] 2024/11/05 13:20:21 PrimaryIP: 10.240.121.21 2024/11/05 13:20:21 Donor: demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:20:21 Opening connection to 10.240.122.36 2024/11/05 13:20:21 Clone required: true 2024/11/05 13:20:21 Checking if a clone in progress 2024/11/05 13:20:21 Clone in progress: false 2024/11/05 13:20:21 Cloning from demand-backup-mysql-1.demand-backup-mysql.kuttl-test-gentle-eft 2024/11/05 13:20:22 Clone finished. Restarting container... 
kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:22 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:20:26 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 317ms (317ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:17 +0000 UTC Normal Pod aws-cli Scheduled Successfully assigned kuttl-test-gentle-eft/aws-cli to gke-jen-ps-767-8e07c66d--default-pool-25830ec2-6n8x default-scheduler logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:18 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulling Pulling image "perconalab/awscli" kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:18 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulled Successfully pulled image "perconalab/awscli" in 309ms (309ms including waiting) kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:18 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Created Created container aws-cli kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:18 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Started Started container aws-cli kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 
2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:23 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit kubelet logger.go:42: 13:21:30 | demand-backup | 2024-11-05 13:21:28 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2024/11/05 13:21:28 readiness check failed: connect to db: ping DB: dial tcp 10.240.121.21:33062: connect: connection refused kubelet logger.go:42: 13:21:31 | demand-backup | Deleting namespace: kuttl-test-gentle-eft === NAME kuttl harness.go:407: run tests finished harness.go:515: cleaning up harness.go:572: removing temp folder: "" --- PASS: kuttl (2327.85s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/demand-backup (2327.40s) PASS