=== RUN kuttl
    harness.go:462: starting setup
    harness.go:252: running tests using configured kubeconfig.
    harness.go:275: Successful connection to cluster at: https://35.232.64.27
    I0817 14:06:24.271103 13246 request.go:682] Waited for 1.0273855s due to client-side throttling, not priority and fairness, request: GET:https://35.232.64.27/apis/networking.gke.io/v1beta2?timeout=32s
    harness.go:360: running tests
    harness.go:73: going to run test suite with timeout of 180 seconds for each step
    harness.go:372: testsuite: e2e-tests/tests has 28 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/demand-backup
=== PAUSE kuttl/harness/demand-backup
=== CONT kuttl/harness/demand-backup
    logger.go:42: 14:06:28 | demand-backup | Creating namespace: kuttl-test-hardy-killdeer
    logger.go:42: 14:06:28 | demand-backup/0-minio-secret | starting test step 0-minio-secret
    logger.go:42: 14:06:32 | demand-backup/0-minio-secret | Secret:kuttl-test-hardy-killdeer/minio-secret created
    logger.go:42: 14:06:32 | demand-backup/0-minio-secret | test step completed 0-minio-secret
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | starting test step 1-deploy-operator
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | running command: [sh -c set -o errexit
        set -o xtrace
        source ../../functions
        kubectl -n "${NAMESPACE}" apply -f "${TESTS_CONFIG_DIR}/cloud-secret.yml"
        deploy_operator
        deploy_non_tls_cluster_secrets
        deploy_tls_cluster_secrets
        deploy_client
        deploy_minio]
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | + source ../../functions
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ realpath ../../..
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | ++++ mktemp -d
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export TEMP_DIR=/tmp/tmp.37f1BA7pY5
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ TEMP_DIR=/tmp/tmp.37f1BA7pY5
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | ++++ git rev-parse --abbrev-ref HEAD
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export GIT_BRANCH=PR-424
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ GIT_BRANCH=PR-424
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export VERSION=PR-424-70568ae
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ VERSION=PR-424-70568ae
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export PMM_SERVER_VERSION=9.9.9
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ PMM_SERVER_VERSION=9.9.9
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ export IMAGE_PMM_SERVER_TAG=dev-latest
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ IMAGE_PMM_SERVER_TAG=dev-latest
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | ++++ which gdate
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | ++++ which date
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ date=/usr/bin/date
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ command -v oc
    logger.go:42: 14:06:32 | demand-backup/1-deploy-operator | +++ oc get projects
    logger.go:42: 14:06:39 | demand-backup/1-deploy-operator | error: the server doesn't have a resource type "projects"
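Each test step re-sources e2e-tests/functions, which sources e2e-tests/vars.sh and produces the block of +++ export lines above. vars.sh itself is not captured in this log; a minimal sketch consistent with the trace (the paths, image tags, and the gdate/date fallback come straight from the log, the ${VAR:-default} style and the VERSION derivation are assumptions) could look like:

    # e2e-tests/vars.sh (sketch; the real file may differ)
    export ROOT_REPO=${ROOT_REPO:-$(realpath ../../..)}            # repo checkout root
    export DEPLOY_DIR=${DEPLOY_DIR:-"${ROOT_REPO}/deploy"}         # CRD/RBAC/operator manifests
    export TESTS_DIR=${TESTS_DIR:-"${ROOT_REPO}/e2e-tests"}
    export TESTS_CONFIG_DIR=${TESTS_CONFIG_DIR:-"${TESTS_DIR}/conf"}
    export TEMP_DIR=$(mktemp -d)                                   # fresh scratch dir per step
    export GIT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
    export VERSION=${VERSION:-"${GIT_BRANCH}-$(git rev-parse --short HEAD)"}
    export IMAGE=${IMAGE:-"perconalab/percona-server-mysql-operator:${VERSION}"}
    # prefer GNU date (gdate on macOS); on this Linux agent plain date is GNU already
    if command -v gdate >/dev/null 2>&1; then date=$(which gdate); else date=$(which date); fi

The `oc get projects` probe and the `kubectl get nodes | grep '^minikube'` probe that follows detect OpenShift and minikube respectively; on this GKE cluster both fall through, so the "projects" error above is expected and harmless.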
    logger.go:42: 14:06:39 | demand-backup/1-deploy-operator | +++ grep '^minikube'
    logger.go:42: 14:06:39 | demand-backup/1-deploy-operator | +++ kubectl get nodes
    logger.go:42: 14:06:40 | demand-backup/1-deploy-operator | ++++ pwd
    logger.go:42: 14:06:40 | demand-backup/1-deploy-operator | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup
    logger.go:42: 14:06:40 | demand-backup/1-deploy-operator | ++ test_name=demand-backup
    logger.go:42: 14:06:40 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-hardy-killdeer apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf/cloud-secret.yml
    logger.go:42: 14:06:43 | demand-backup/1-deploy-operator | secret/aws-s3-secret created
    logger.go:42: 14:06:44 | demand-backup/1-deploy-operator | secret/gcp-cs-secret created
    logger.go:42: 14:06:44 | demand-backup/1-deploy-operator | secret/azure-secret created
    logger.go:42: 14:06:44 | demand-backup/1-deploy-operator | + deploy_operator
    logger.go:42: 14:06:44 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-hardy-killdeer apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy/crd.yaml
    logger.go:42: 14:06:45 | demand-backup/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlbackups.ps.percona.com serverside-applied
    logger.go:42: 14:06:45 | demand-backup/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqlrestores.ps.percona.com serverside-applied
    logger.go:42: 14:06:46 | demand-backup/1-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconaservermysqls.ps.percona.com serverside-applied
    logger.go:42: 14:06:46 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-hardy-killdeer apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy/rbac.yaml
    logger.go:42: 14:06:48 | demand-backup/1-deploy-operator | serviceaccount/percona-server-mysql-operator created
    logger.go:42: 14:06:48 | demand-backup/1-deploy-operator | serviceaccount/percona-server-mysql-operator-orchestrator created
    logger.go:42: 14:06:48 | demand-backup/1-deploy-operator | role.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created
    logger.go:42: 14:06:49 | demand-backup/1-deploy-operator | role.rbac.authorization.k8s.io/percona-server-mysql-operator created
    logger.go:42: 14:06:49 | demand-backup/1-deploy-operator | role.rbac.authorization.k8s.io/percona-server-mysql-operator-orchestrator created
    logger.go:42: 14:06:50 | demand-backup/1-deploy-operator | rolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator-leaderelection created
    logger.go:42: 14:06:50 | demand-backup/1-deploy-operator | rolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator created
    logger.go:42: 14:06:50 | demand-backup/1-deploy-operator | rolebinding.rbac.authorization.k8s.io/percona-server-mysql-operator-orchestrator created
    logger.go:42: 14:06:50 | demand-backup/1-deploy-operator | + yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="DISABLE_TELEMETRY").value) = "true"'
    logger.go:42: 14:06:50 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-hardy-killdeer apply -f -
    logger.go:42: 14:06:50 | demand-backup/1-deploy-operator | ++ printf 'select(documentIndex==1).spec.template.spec.containers[0].image="%s"' perconalab/percona-server-mysql-operator:PR-424-70568ae
    logger.go:42: 14:06:50 | demand-backup/1-deploy-operator | + yq eval 'select(documentIndex==1).spec.template.spec.containers[0].image="perconalab/percona-server-mysql-operator:PR-424-70568ae"' /mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy/operator.yaml
    logger.go:42: 14:06:53 | demand-backup/1-deploy-operator | configmap/percona-server-mysql-operator-config created
    logger.go:42: 14:06:54 | demand-backup/1-deploy-operator | deployment.apps/percona-server-mysql-operator created
    logger.go:42: 14:06:54 | demand-backup/1-deploy-operator | + deploy_non_tls_cluster_secrets
    logger.go:42: 14:06:54 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-hardy-killdeer apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf/secrets.yaml
    logger.go:42: 14:06:55 | demand-backup/1-deploy-operator | secret/test-secrets created
    logger.go:42: 14:06:55 | demand-backup/1-deploy-operator | + deploy_tls_cluster_secrets
    logger.go:42: 14:06:55 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-hardy-killdeer apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf/ssl-secret.yaml
    logger.go:42: 14:06:56 | demand-backup/1-deploy-operator | secret/test-ssl created
    logger.go:42: 14:06:56 | demand-backup/1-deploy-operator | + deploy_client
    logger.go:42: 14:06:56 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-hardy-killdeer apply -f /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf/client.yaml
    logger.go:42: 14:06:57 | demand-backup/1-deploy-operator | pod/mysql-client created
    logger.go:42: 14:06:57 | demand-backup/1-deploy-operator | + deploy_minio
    logger.go:42: 14:06:57 | demand-backup/1-deploy-operator | ++ kubectl -n kuttl-test-hardy-killdeer get secret minio-secret -o 'jsonpath={.data.AWS_ACCESS_KEY_ID}'
    logger.go:42: 14:06:57 | demand-backup/1-deploy-operator | ++ base64 -d
    logger.go:42: 14:06:58 | demand-backup/1-deploy-operator | + local 'accessKey=some-access$\n"-key'
    logger.go:42: 14:06:58 | demand-backup/1-deploy-operator | ++ kubectl -n kuttl-test-hardy-killdeer get secret minio-secret -o 'jsonpath={.data.AWS_SECRET_ACCESS_KEY}'
    logger.go:42: 14:06:58 | demand-backup/1-deploy-operator | ++ base64 -d
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | + local 'secretKey=some-$\n"secret-key'
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | + helm uninstall -n kuttl-test-hardy-killdeer minio-service
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-424/kubeconfig
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-424/kubeconfig
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | Error: uninstall: Release not loaded: minio-service: release: not found
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | + :
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | + helm repo remove minio
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-424/kubeconfig
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-424/kubeconfig
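The deploy_operator trace above boils down to two plain applies plus a yq patch of deploy/operator.yaml. A sketch of the helper as reconstructed from the xtrace (the function body and pipeline order are assumptions; every command in it appears verbatim in the log):

    deploy_operator() {
        # CRDs are applied server-side so repeated runs don't fight over field ownership
        kubectl -n "${NAMESPACE}" apply --server-side --force-conflicts -f "${DEPLOY_DIR}/crd.yaml"
        kubectl -n "${NAMESPACE}" apply -f "${DEPLOY_DIR}/rbac.yaml"
        # Document 1 of operator.yaml is the manager Deployment: pin the image
        # under test and disable telemetry before applying the result.
        yq eval "$(printf 'select(documentIndex==1).spec.template.spec.containers[0].image="%s"' "${IMAGE}")" "${DEPLOY_DIR}/operator.yaml" \
            | yq eval '(select(documentIndex==1).spec.template.spec.containers[] | select(.name=="manager").env[] | select(.name=="DISABLE_TELEMETRY").value) = "true"' - \
            | kubectl -n "${NAMESPACE}" apply -f -
    }

The deploy_minio trace that starts above deliberately uses credentials containing $, \n and " characters, so later escaping bugs in the backup path would surface here.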
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | Error: no repositories configured
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | + :
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | + helm repo add minio https://helm.min.io/
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-424/kubeconfig
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-424/kubeconfig
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | "minio" has been added to your repositories
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | ++ printf %q 'some-access$\n"-key'
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | ++ printf %q 'some-$\n"secret-key'
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | + retry 10 60 helm install minio-service -n kuttl-test-hardy-killdeer --version 8.0.5 --set 'accessKey=some-access\$\\n\"-key' --set 'secretKey=some-\$\\n\"secret-key' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set environment.MINIO_REGION=us-east-1 --set environment.MINIO_HTTP_TRACE=/tmp/trace.log --set securityContext.enabled=false minio/minio
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | + local max=10
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | + local delay=60
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | + shift 2
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | + local n=1
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | + helm install minio-service -n kuttl-test-hardy-killdeer --version 8.0.5 --set 'accessKey=some-access\$\\n\"-key' --set 'secretKey=some-\$\\n\"secret-key' --set service.type=ClusterIP --set configPathmc=/tmp/.minio/ --set persistence.size=2G --set environment.MINIO_REGION=us-east-1 --set environment.MINIO_HTTP_TRACE=/tmp/trace.log --set securityContext.enabled=false minio/minio
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is group-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-424/kubeconfig
    logger.go:42: 14:07:00 | demand-backup/1-deploy-operator | WARNING: Kubernetes configuration file is world-readable. This is insecure. Location: /mnt/jenkins/workspace/cloud-ps-operator_PR-424/kubeconfig
    logger.go:42: 14:07:01 | demand-backup/1-deploy-operator | E0817 14:07:01.650484 13958 memcache.go:287] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
    logger.go:42: 14:07:01 | demand-backup/1-deploy-operator | E0817 14:07:01.870005 13958 memcache.go:121] couldn't get resource list for metrics.k8s.io/v1beta1: the server is currently unable to handle the request
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | NAME: minio-service
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | LAST DEPLOYED: Thu Aug 17 14:07:01 2023
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | NAMESPACE: kuttl-test-hardy-killdeer
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | STATUS: deployed
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | REVISION: 1
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | TEST SUITE: None
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | NOTES:
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | Minio can be accessed via port 9000 on the following DNS name from within your cluster:
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | minio-service.kuttl-test-hardy-killdeer.svc.cluster.local
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator |
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | To access Minio from localhost, run the below commands:
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator |
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | 1. export POD_NAME=$(kubectl get pods --namespace kuttl-test-hardy-killdeer -l "release=minio-service" -o jsonpath="{.items[0].metadata.name}")
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator |
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | 2. kubectl port-forward $POD_NAME 9000 --namespace kuttl-test-hardy-killdeer
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator |
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | Read more about port forwarding here: http://kubernetes.io/docs/user-guide/kubectl/kubectl_port-forward/
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator |
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | You can now access Minio server on http://localhost:9000. Follow the below steps to connect to Minio server with mc client:
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator |
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | 1. Download the Minio mc client - https://docs.minio.io/docs/minio-client-quickstart-guide
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator |
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | 2. Get the ACCESS_KEY=$(kubectl get secret minio-service -o jsonpath="{.data.accesskey}" | base64 --decode) and the SECRET_KEY=$(kubectl get secret minio-service -o jsonpath="{.data.secretkey}" | base64 --decode)
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator |
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | 3. mc alias set minio-service-local http://localhost:9000 "$ACCESS_KEY" "$SECRET_KEY" --api s3v4
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator |
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | 4. mc ls minio-service-local
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator |
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | Alternately, you can use your browser or the Minio SDK to access the server - https://docs.minio.io/categories/17
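deploy_minio installs the chart through a retry wrapper (the `retry 10 60 helm install ...` call above), and the xtrace shows its locals: max, delay, a `shift 2`, and a counter n. A sketch consistent with that trace (the loop body is an assumption):

    retry() {
        local max=$1
        local delay=$2
        shift 2               # the remaining arguments are the command to run
        local n=1
        until "$@"; do
            if [[ ${n} -ge ${max} ]]; then
                echo "retry: command failed after ${n} attempts" >&2
                return 1
            fi
            sleep "${delay}"
            n=$((n + 1))
        done
    }

Note also the two `printf %q` calls above: they re-escape the intentionally hostile minio credentials so the $, \n and " characters survive being passed through `helm --set`.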
    logger.go:42: 14:07:05 | demand-backup/1-deploy-operator | ++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=release=minio-service -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 14:07:06 | demand-backup/1-deploy-operator | + MINIO_POD=minio-service-6f47578c5b-snpfm
    logger.go:42: 14:07:06 | demand-backup/1-deploy-operator | + wait_pod minio-service-6f47578c5b-snpfm
    logger.go:42: 14:07:06 | demand-backup/1-deploy-operator | + local pod=minio-service-6f47578c5b-snpfm
    logger.go:42: 14:07:06 | demand-backup/1-deploy-operator | + set +o xtrace
    logger.go:42: 14:07:21 | demand-backup/1-deploy-operator | minio-service-6f47578c5b-snpfm........true
    logger.go:42: 14:07:21 | demand-backup/1-deploy-operator | + kubectl -n kuttl-test-hardy-killdeer run -i --rm aws-cli --image=perconalab/awscli --restart=Never -- bash -c 'AWS_ACCESS_KEY_ID='\''some-access$\n"-key'\'' AWS_SECRET_ACCESS_KEY='\''some-$\n"secret-key'\'' AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 mb s3://operator-testing'
    logger.go:42: 14:07:27 | demand-backup/1-deploy-operator | If you don't see a command prompt, try pressing enter.
    logger.go:42: 14:07:28 | demand-backup/1-deploy-operator | Error attaching, falling back to logs: Internal error occurred: error attaching to container: failed to load task: no running task found: task ad964e9ab584c8998fb75c48a810d165bcd0d31d0b23ee81488f483366656aee not found: not found
    logger.go:42: 14:07:28 | demand-backup/1-deploy-operator | make_bucket: operator-testing
    logger.go:42: 14:07:30 | demand-backup/1-deploy-operator | pod "aws-cli" deleted
    logger.go:42: 14:07:32 | demand-backup/1-deploy-operator | test step completed 1-deploy-operator
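wait_pod turns off xtrace and polls until the pod reports ready, producing the `minio-service-6f47578c5b-snpfm........true` progress line above (one dot per poll, then the final ready value). A sketch under those assumptions (the polling interval, jsonpath, and give-up limit are guesses; only the locals and the output format come from the log):

    wait_pod() {
        local pod=$1
        set +o xtrace
        local retry=0
        echo -n "${pod}"
        # poll the first container's ready flag until it flips to true
        until [[ "$(kubectl -n "${NAMESPACE}" get pod "${pod}" \
                -o jsonpath='{.status.containerStatuses[0].ready}' 2>/dev/null)" == "true" ]]; do
            sleep 1
            echo -n .
            retry=$((retry + 1))
            if [[ ${retry} -ge 360 ]]; then      # assumed upper bound, ~6 minutes
                echo "pod/${pod} did not become ready" >&2
                exit 1
            fi
        done
        echo true
    }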
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | starting test step 2-create-cluster
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | running command: [sh -c set -o errexit
        set -o xtrace
        source ../../functions
        get_cr \
            | yq eval '.spec.mysql.clusterType="async"' - \
            | yq eval '.spec.mysql.size=3' - \
            | yq eval '.spec.proxy.haproxy.enabled=true' - \
            | yq eval '.spec.proxy.haproxy.size=3' - \
            | yq eval '.spec.orchestrator.enabled=true' - \
            | yq eval '.spec.orchestrator.size=3' - \
            | yq eval '.spec.backup.storages.minio.type="s3"' - \
            | yq eval '.spec.backup.storages.minio.s3.bucket="operator-testing"' - \
            | yq eval '.spec.backup.storages.minio.s3.credentialsSecret="minio-secret"' - \
            | yq eval '.spec.backup.storages.minio.s3.endpointUrl="http://minio-service:9000"' - \
            | yq eval '.spec.backup.storages.minio.s3.region="us-east-1"' - \
            | yq eval '.spec.backup.storages.aws-s3.type="s3"' - \
            | yq eval '.spec.backup.storages.aws-s3.verifyTLS=true' - \
            | yq eval '.spec.backup.storages.aws-s3.s3.bucket="operator-testing"' - \
            | yq eval '.spec.backup.storages.aws-s3.s3.credentialsSecret="aws-s3-secret"' - \
            | yq eval '.spec.backup.storages.aws-s3.s3.region="us-east-1"' - \
            | yq eval '.spec.backup.storages.aws-s3.s3.prefix="ps"' - \
            | yq eval '.spec.backup.storages.gcp-cs.type="gcs"' - \
            | yq eval '.spec.backup.storages.gcp-cs.verifyTLS=true' - \
            | yq eval '.spec.backup.storages.gcp-cs.gcs.bucket="operator-testing"' - \
            | yq eval '.spec.backup.storages.gcp-cs.gcs.credentialsSecret="gcp-cs-secret"' - \
            | yq eval '.spec.backup.storages.gcp-cs.gcs.endpointUrl="https://storage.googleapis.com"' - \
            | yq eval '.spec.backup.storages.gcp-cs.gcs.prefix="ps"' - \
            | yq eval '.spec.backup.storages.azure-blob.type="azure"' - \
            | yq eval '.spec.backup.storages.azure-blob.verifyTLS=true' - \
            | yq eval '.spec.backup.storages.azure-blob.azure.containerName="operator-testing"' - \
            | yq eval '.spec.backup.storages.azure-blob.azure.credentialsSecret="azure-secret"' - \
            | yq eval '.spec.backup.storages.azure-blob.azure.prefix="ps"' - \
            | kubectl -n "${NAMESPACE}" apply -f -]
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | + source ../../functions
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ realpath ../../..
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | ++++ mktemp -d
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export TEMP_DIR=/tmp/tmp.HLm7fiQEMi
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ TEMP_DIR=/tmp/tmp.HLm7fiQEMi
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | ++++ git rev-parse --abbrev-ref HEAD
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export GIT_BRANCH=PR-424
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ GIT_BRANCH=PR-424
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export VERSION=PR-424-70568ae
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ VERSION=PR-424-70568ae
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export PMM_SERVER_VERSION=9.9.9
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ PMM_SERVER_VERSION=9.9.9
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ export IMAGE_PMM_SERVER_TAG=dev-latest
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ IMAGE_PMM_SERVER_TAG=dev-latest
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | ++++ which gdate
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | ++++ which date
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ date=/usr/bin/date
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ command -v oc
    logger.go:42: 14:07:32 | demand-backup/2-create-cluster | +++ oc get projects
    logger.go:42: 14:07:37 | demand-backup/2-create-cluster | error: the server doesn't have a resource type "projects"
    logger.go:42: 14:07:37 | demand-backup/2-create-cluster | +++ kubectl get nodes
    logger.go:42: 14:07:37 | demand-backup/2-create-cluster | +++ grep '^minikube'
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | ++++ pwd
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | ++ test_name=demand-backup
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + get_cr
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + local name_suffix=
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval .spec.mysql.size=3 -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.mysql.clusterType="async"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.bucket="operator-testing"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.credentialsSecret="azure-secret"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.credentialsSecret="aws-s3-secret"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + kubectl -n kuttl-test-hardy-killdeer apply -f -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.bucket="operator-testing"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval .spec.orchestrator.size=3 -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.type="s3"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.credentialsSecret="minio-secret"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.prefix="ps"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.prefix="ps"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval .spec.backup.storages.aws-s3.verifyTLS=true -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.upgradeOptions.apply="disabled"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.endpointUrl="http://minio-service:9000"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.minio.s3.region="us-east-1"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.mysql.clusterType="async"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.type="gcs"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + '[' -n '' ']'
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval .spec.backup.storages.gcp-cs.verifyTLS=true -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.bucket="operator-testing"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.azure.containerName="operator-testing"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.azure-blob.type="azure"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.type="s3"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval .spec.orchestrator.enabled=true -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | ++ printf '.spec.backup.image="%s"' perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.image="perconalab/percona-server-mysql-operator:main-backup"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | ++ printf '.metadata.name="%s"' demand-backup
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | ++ printf '.spec.proxy.router.image="%s"' perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.proxy.router.image="perconalab/percona-server-mysql-operator:main-router"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | ++ printf '.spec.orchestrator.image="%s"' perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.orchestrator.image="perconalab/percona-server-mysql-operator:main-orchestrator"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.metadata.name="demand-backup"' /mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy/cr.yaml
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval .spec.proxy.haproxy.enabled=true -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.sslSecretName="test-ssl"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.region="us-east-1"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | ++ printf '.spec.mysql.image="%s"' perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.aws-s3.s3.prefix="ps"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.credentialsSecret="gcp-cs-secret"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.backup.storages.gcp-cs.gcs.endpointUrl="https://storage.googleapis.com"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval .spec.backup.storages.azure-blob.verifyTLS=true -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.mysql.image="perconalab/percona-server-mysql-operator:main-psmysql"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | ++ printf '.spec.pmm.image="%s"' perconalab/pmm-client:dev-latest
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.pmm.image="perconalab/pmm-client:dev-latest"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval .spec.proxy.haproxy.size=3 -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.secretsName="test-secrets"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | ++ printf '.spec.initImage="%s"' perconalab/percona-server-mysql-operator:PR-424-70568ae
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.initImage="perconalab/percona-server-mysql-operator:PR-424-70568ae"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | ++ printf '.spec.toolkit.image="%s"' perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.toolkit.image="perconalab/percona-server-mysql-operator:main-toolkit"' -
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | ++ printf '.spec.proxy.haproxy.image="%s"' perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 14:07:38 | demand-backup/2-create-cluster | + yq eval '.spec.proxy.haproxy.image="perconalab/percona-server-mysql-operator:main-haproxy"' -
    logger.go:42: 14:07:39 | demand-backup/2-create-cluster | perconaservermysql.ps.percona.com/demand-backup created
    logger.go:42: 14:12:50 | demand-backup/2-create-cluster | test step completed 2-create-cluster
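The interleaved `+ yq eval` lines above are a single long pipeline whose stages xtrace prints out of order: get_cr emits deploy/cr.yaml with the test name, secret names, and every component image overridden, and the step script then layers the four backup storages (minio, aws-s3, gcp-cs, azure-blob) on top before `kubectl apply -f -`. A sketch of get_cr reconstructed from the trace (the function structure is assumed; every yq expression appears verbatim in the log):

    get_cr() {
        local name_suffix=$1
        yq eval "$(printf '.metadata.name="%s"' "${test_name}${name_suffix:+-${name_suffix}}")" "${DEPLOY_DIR}/cr.yaml" \
            | yq eval '.spec.secretsName="test-secrets"' - \
            | yq eval '.spec.sslSecretName="test-ssl"' - \
            | yq eval '.spec.upgradeOptions.apply="disabled"' - \
            | yq eval "$(printf '.spec.initImage="%s"' "${IMAGE}")" - \
            | yq eval "$(printf '.spec.mysql.image="%s"' "${IMAGE_MYSQL}")" - \
            | yq eval "$(printf '.spec.backup.image="%s"' "${IMAGE_BACKUP}")" - \
            | yq eval "$(printf '.spec.orchestrator.image="%s"' "${IMAGE_ORCHESTRATOR}")" - \
            | yq eval "$(printf '.spec.proxy.haproxy.image="%s"' "${IMAGE_HAPROXY}")" - \
            | yq eval "$(printf '.spec.proxy.router.image="%s"' "${IMAGE_ROUTER}")" - \
            | yq eval "$(printf '.spec.toolkit.image="%s"' "${IMAGE_TOOLKIT}")" - \
            | yq eval "$(printf '.spec.pmm.image="%s"' "${IMAGE_PMM}")" -
    }

The `+ '[' -n '' ']'` line is the empty name_suffix check; with no suffix the CR keeps the plain test name, demand-backup.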
    logger.go:42: 14:12:50 | demand-backup/3-write-data | starting test step 3-write-data
    logger.go:42: 14:12:50 | demand-backup/3-write-data | running command: [sh -c set -o errexit
        set -o xtrace
        source ../../functions
        run_mysql \
            "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" \
            "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"
        run_mysql \
            "INSERT myDB.myTable (id) VALUES (100500)" \
            "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"]
    logger.go:42: 14:12:50 | demand-backup/3-write-data | + source ../../functions
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ realpath ../../..
    logger.go:42: 14:12:50 | demand-backup/3-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424
    logger.go:42: 14:12:50 | demand-backup/3-write-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf
    logger.go:42: 14:12:50 | demand-backup/3-write-data | ++++ mktemp -d
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export TEMP_DIR=/tmp/tmp.NaEjuB4eaH
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ TEMP_DIR=/tmp/tmp.NaEjuB4eaH
    logger.go:42: 14:12:50 | demand-backup/3-write-data | ++++ git rev-parse --abbrev-ref HEAD
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export GIT_BRANCH=PR-424
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ GIT_BRANCH=PR-424
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export VERSION=PR-424-70568ae
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ VERSION=PR-424-70568ae
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export PMM_SERVER_VERSION=9.9.9
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ PMM_SERVER_VERSION=9.9.9
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ export IMAGE_PMM_SERVER_TAG=dev-latest
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ IMAGE_PMM_SERVER_TAG=dev-latest
    logger.go:42: 14:12:50 | demand-backup/3-write-data | ++++ which gdate
    logger.go:42: 14:12:50 | demand-backup/3-write-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
    logger.go:42: 14:12:50 | demand-backup/3-write-data | ++++ which date
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ date=/usr/bin/date
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ command -v oc
    logger.go:42: 14:12:50 | demand-backup/3-write-data | +++ oc get projects
    logger.go:42: 14:12:56 | demand-backup/3-write-data | error: the server doesn't have a resource type "projects"
    logger.go:42: 14:12:56 | demand-backup/3-write-data | +++ kubectl get nodes
    logger.go:42: 14:12:56 | demand-backup/3-write-data | +++ grep '^minikube'
    logger.go:42: 14:12:57 | demand-backup/3-write-data | ++++ pwd
    logger.go:42: 14:12:57 | demand-backup/3-write-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup
    logger.go:42: 14:12:57 | demand-backup/3-write-data | ++ test_name=demand-backup
    logger.go:42: 14:12:57 | demand-backup/3-write-data | +++ get_cluster_name
    logger.go:42: 14:12:57 | demand-backup/3-write-data | +++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}'
    logger.go:42: 14:12:58 | demand-backup/3-write-data | ++ get_haproxy_svc demand-backup
    logger.go:42: 14:12:58 | demand-backup/3-write-data | ++ local cluster=demand-backup
    logger.go:42: 14:12:58 | demand-backup/3-write-data | ++ echo demand-backup-haproxy
    logger.go:42: 14:12:58 | demand-backup/3-write-data | + run_mysql 'CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)' '-h demand-backup-haproxy -uroot -proot_password'
    logger.go:42: 14:12:58 | demand-backup/3-write-data | + local 'command=CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)'
    logger.go:42: 14:12:58 | demand-backup/3-write-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password'
    logger.go:42: 14:12:58 | demand-backup/3-write-data | + local pod=
    logger.go:42: 14:12:58 | demand-backup/3-write-data | ++ get_client_pod
    logger.go:42: 14:12:58 | demand-backup/3-write-data | ++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 14:12:58 | demand-backup/3-write-data | + client_pod=mysql-client
    logger.go:42: 14:12:58 | demand-backup/3-write-data | + wait_pod mysql-client
    logger.go:42: 14:12:58 | demand-backup/3-write-data | + local pod=mysql-client
    logger.go:42: 14:12:58 | demand-backup/3-write-data | + set +o xtrace
    logger.go:42: 14:12:59 | demand-backup/3-write-data | mysql-clienttrue
    logger.go:42: 14:12:59 | demand-backup/3-write-data | + sed -e 's/mysql: //'
    logger.go:42: 14:12:59 | demand-backup/3-write-data | + kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "CREATE DATABASE IF NOT EXISTS myDB; CREATE TABLE IF NOT EXISTS myDB.myTable (id int PRIMARY KEY)" | mysql -sN -h demand-backup-haproxy -uroot -proot_password'
    logger.go:42: 14:12:59 | demand-backup/3-write-data | + grep -v 'Using a password on the command line interface can be insecure.'
    logger.go:42: 14:13:01 | demand-backup/3-write-data | + :
    logger.go:42: 14:13:01 | demand-backup/3-write-data | +++ get_cluster_name
    logger.go:42: 14:13:01 | demand-backup/3-write-data | +++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}'
    logger.go:42: 14:13:01 | demand-backup/3-write-data | ++ get_haproxy_svc demand-backup
    logger.go:42: 14:13:01 | demand-backup/3-write-data | ++ local cluster=demand-backup
    logger.go:42: 14:13:01 | demand-backup/3-write-data | ++ echo demand-backup-haproxy
    logger.go:42: 14:13:01 | demand-backup/3-write-data | + run_mysql 'INSERT myDB.myTable (id) VALUES (100500)' '-h demand-backup-haproxy -uroot -proot_password'
    logger.go:42: 14:13:01 | demand-backup/3-write-data | + local 'command=INSERT myDB.myTable (id) VALUES (100500)'
    logger.go:42: 14:13:01 | demand-backup/3-write-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password'
    logger.go:42: 14:13:01 | demand-backup/3-write-data | + local pod=
    logger.go:42: 14:13:01 | demand-backup/3-write-data | ++ get_client_pod
    logger.go:42: 14:13:01 | demand-backup/3-write-data | ++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}'
    logger.go:42: 14:13:02 | demand-backup/3-write-data | + client_pod=mysql-client
    logger.go:42: 14:13:02 | demand-backup/3-write-data | + wait_pod mysql-client
    logger.go:42: 14:13:02 | demand-backup/3-write-data | + local pod=mysql-client
    logger.go:42: 14:13:02 | demand-backup/3-write-data | + set +o xtrace
    logger.go:42: 14:13:03 | demand-backup/3-write-data | mysql-clienttrue
    logger.go:42: 14:13:03 | demand-backup/3-write-data | + kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "INSERT myDB.myTable (id) VALUES (100500)" | mysql -sN -h demand-backup-haproxy -uroot -proot_password'
    logger.go:42: 14:13:03 | demand-backup/3-write-data | + sed -e 's/mysql: //'
    logger.go:42: 14:13:03 | demand-backup/3-write-data | + grep -v 'Using a password on the command line interface can be insecure.'
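Both writes go through run_mysql, which execs the statement inside the long-lived mysql-client pod and scrubs the client noise from the output. A sketch matching the exec command visible above (the wrapper itself is an assumption, as is the optional third argument):

    run_mysql() {
        local command=$1
        local uri=$2              # e.g. "-h demand-backup-haproxy -uroot -proot_password"
        local pod=${3:-$(get_client_pod)}

        wait_pod "${pod}"
        # feed the SQL to mysql -sN inside the client pod, then drop the
        # "mysql:" prefix and the password-on-command-line warning
        kubectl -n "${NAMESPACE}" exec "${pod}" -- \
            bash -c "printf '%s\n' \"${command}\" | mysql -sN ${uri}" \
            | sed -e 's/mysql: //' \
            | grep -v 'Using a password on the command line interface can be insecure.'
    }

The bare `+ :` that follows each call suggests an `|| :` guard in the step script: DDL/DML prints nothing, so grep -v would exit non-zero on empty output and trip errexit without it.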
    logger.go:42: 14:13:04 | demand-backup/3-write-data | + :
    logger.go:42: 14:13:05 | demand-backup/3-write-data | test step completed 3-write-data
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | starting test step 4-move-primary-before-backup
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | running command: [sh -c set -o errexit
        set -o xtrace
        source ../../functions
        primary_pod_from_label="$(get_primary_from_label)"
        kubectl delete pod -n ${NAMESPACE} ${primary_pod_from_label}
        wait_cluster_consistency_async "${test_name}" "3" "3"
        new_primary_pod_from_label="$(get_primary_from_label)"
        if [ "${primary_pod_from_label}" == "${new_primary_pod_from_label}" ]; then
            echo "Old (${primary_pod_from_label}) and new (${new_primary_pod_from_label}) primary are the same (the failover didn't happen)!"
            exit 1
        fi]
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | + source ../../functions
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ realpath ../../..
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | ++++ mktemp -d
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export TEMP_DIR=/tmp/tmp.8h2wRtAsYK
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ TEMP_DIR=/tmp/tmp.8h2wRtAsYK
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | ++++ git rev-parse --abbrev-ref HEAD
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export GIT_BRANCH=PR-424
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ GIT_BRANCH=PR-424
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export VERSION=PR-424-70568ae
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ VERSION=PR-424-70568ae
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export PMM_SERVER_VERSION=9.9.9
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ PMM_SERVER_VERSION=9.9.9
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ export IMAGE_PMM_SERVER_TAG=dev-latest
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ IMAGE_PMM_SERVER_TAG=dev-latest
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | ++++ which gdate
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin)
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | ++++ which date
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ date=/usr/bin/date
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ command -v oc
    logger.go:42: 14:13:05 | demand-backup/4-move-primary-before-backup | +++ oc get projects
    logger.go:42: 14:13:11 | demand-backup/4-move-primary-before-backup | error: the server doesn't have a resource type "projects"
    logger.go:42: 14:13:11 | demand-backup/4-move-primary-before-backup | +++ kubectl get nodes
    logger.go:42: 14:13:11 | demand-backup/4-move-primary-before-backup | +++ grep '^minikube'
    logger.go:42: 14:13:12 | demand-backup/4-move-primary-before-backup | ++++ pwd
    logger.go:42: 14:13:12 | demand-backup/4-move-primary-before-backup | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup
    logger.go:42: 14:13:12 | demand-backup/4-move-primary-before-backup | ++ test_name=demand-backup
    logger.go:42: 14:13:12 | demand-backup/4-move-primary-before-backup | ++ get_primary_from_label
    logger.go:42: 14:13:12 | demand-backup/4-move-primary-before-backup | ++ kubectl -n kuttl-test-hardy-killdeer get pods -l mysql.percona.com/primary=true '-ojsonpath={.items[0].metadata.name}'
    logger.go:42: 14:13:12 | demand-backup/4-move-primary-before-backup | + primary_pod_from_label=demand-backup-mysql-0
    logger.go:42: 14:13:12 | demand-backup/4-move-primary-before-backup | + kubectl delete pod -n kuttl-test-hardy-killdeer demand-backup-mysql-0
    logger.go:42: 14:13:13 | demand-backup/4-move-primary-before-backup | pod "demand-backup-mysql-0" deleted
    logger.go:42: 14:13:33 | demand-backup/4-move-primary-before-backup | + wait_cluster_consistency_async demand-backup 3 3
    logger.go:42: 14:13:33 | demand-backup/4-move-primary-before-backup | + local cluster_name=demand-backup
    logger.go:42: 14:13:33 | demand-backup/4-move-primary-before-backup | + local cluster_size=3
    logger.go:42: 14:13:33 | demand-backup/4-move-primary-before-backup | + local orc_size=3
    logger.go:42: 14:13:33 | demand-backup/4-move-primary-before-backup | + '[' -z 3 ']'
    logger.go:42: 14:13:33 | demand-backup/4-move-primary-before-backup | + sleep 7
    logger.go:42: 14:13:40 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-hardy-killdeer -o 'jsonpath={.status.mysql.state}'
    logger.go:42: 14:13:41 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
    logger.go:42: 14:13:41 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)'
    logger.go:42: 14:13:41 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async)
    logger.go:42: 14:13:41 | demand-backup/4-move-primary-before-backup | + sleep 15
    logger.go:42: 14:13:56 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-hardy-killdeer -o 'jsonpath={.status.mysql.state}'
    logger.go:42: 14:13:57 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
    logger.go:42: 14:13:57 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)'
    logger.go:42: 14:13:57 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async)
    logger.go:42: 14:13:57 | demand-backup/4-move-primary-before-backup | + sleep 15
    logger.go:42: 14:14:12 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-hardy-killdeer -o 'jsonpath={.status.mysql.state}'
    logger.go:42: 14:14:13 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
    logger.go:42: 14:14:13 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)'
    logger.go:42: 14:14:13 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async)
    logger.go:42: 14:14:13 | demand-backup/4-move-primary-before-backup | + sleep 15
    logger.go:42: 14:14:28 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-hardy-killdeer -o 'jsonpath={.status.mysql.state}'
    logger.go:42: 14:14:28 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
    logger.go:42: 14:14:28 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)'
    logger.go:42: 14:14:28 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async)
    logger.go:42: 14:14:28 | demand-backup/4-move-primary-before-backup | + sleep 15
    logger.go:42: 14:14:43 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-hardy-killdeer -o 'jsonpath={.status.mysql.state}'
    logger.go:42: 14:14:44 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
    logger.go:42: 14:14:44 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)'
    logger.go:42: 14:14:44 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async)
    logger.go:42: 14:14:44 | demand-backup/4-move-primary-before-backup | + sleep 15
    logger.go:42: 14:14:59 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-hardy-killdeer -o 'jsonpath={.status.mysql.state}'
    logger.go:42: 14:15:00 | demand-backup/4-move-primary-before-backup | + [[ initializing == \r\e\a\d\y ]]
    logger.go:42: 14:15:00 | demand-backup/4-move-primary-before-backup | + echo 'waiting for cluster readyness (async)'
    logger.go:42: 14:15:00 | demand-backup/4-move-primary-before-backup | waiting for cluster readyness (async)
    logger.go:42: 14:15:00 | demand-backup/4-move-primary-before-backup | + sleep 15
    logger.go:42: 14:15:15 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-hardy-killdeer -o 'jsonpath={.status.mysql.state}'
    logger.go:42: 14:15:16 | demand-backup/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]]
    logger.go:42: 14:15:16 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-hardy-killdeer -o 'jsonpath={.status.mysql.ready}'
    logger.go:42: 14:15:17 | demand-backup/4-move-primary-before-backup | + [[ 3 == \3 ]]
    logger.go:42: 14:15:17 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-hardy-killdeer -o 'jsonpath={.status.orchestrator.ready}'
    logger.go:42: 14:15:18 | demand-backup/4-move-primary-before-backup | + [[ 3 == \3 ]]
    logger.go:42: 14:15:18 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-hardy-killdeer -o 'jsonpath={.status.orchestrator.state}'
    logger.go:42: 14:15:19 | demand-backup/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]]
    logger.go:42: 14:15:19 | demand-backup/4-move-primary-before-backup | ++ kubectl get ps demand-backup -n kuttl-test-hardy-killdeer -o 'jsonpath={.status.state}'
    logger.go:42: 14:15:19 | demand-backup/4-move-primary-before-backup | + [[ ready == \r\e\a\d\y ]]
    logger.go:42: 14:15:19 | demand-backup/4-move-primary-before-backup | ++ get_primary_from_label
    logger.go:42: 14:15:19 | demand-backup/4-move-primary-before-backup | ++ kubectl -n kuttl-test-hardy-killdeer get pods -l mysql.percona.com/primary=true '-ojsonpath={.items[0].metadata.name}'
    logger.go:42: 14:15:20 | demand-backup/4-move-primary-before-backup | + new_primary_pod_from_label=demand-backup-mysql-2
    logger.go:42: 14:15:20 | demand-backup/4-move-primary-before-backup | + '[' demand-backup-mysql-0 == demand-backup-mysql-2 ']'
    logger.go:42: 14:15:21 | demand-backup/4-move-primary-before-backup | test step completed 4-move-primary-before-backup
    logger.go:42: 14:15:21
| demand-backup/5-create-backup-minio | starting test step 5-create-backup-minio logger.go:42: 14:15:22 | demand-backup/5-create-backup-minio | PerconaServerMySQLBackup:kuttl-test-hardy-killdeer/demand-backup-minio created logger.go:42: 14:15:38 | demand-backup/5-create-backup-minio | test step completed 5-create-backup-minio logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | starting test step 6-check-password-leak logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | running command: [sh -c set -o errexit set -o xtrace source ../../functions check_passwords_leak] logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | + source ../../functions logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ realpath ../../.. logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | ++++ mktemp -d logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export TEMP_DIR=/tmp/tmp.c3nyjK7VRj logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ TEMP_DIR=/tmp/tmp.c3nyjK7VRj logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export GIT_BRANCH=PR-424 logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ GIT_BRANCH=PR-424 logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export VERSION=PR-424-70568ae logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ VERSION=PR-424-70568ae logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export 
IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ export IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | ++++ which gdate logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | ++++ which date logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ date=/usr/bin/date logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ command -v oc logger.go:42: 14:15:38 | demand-backup/6-check-password-leak | +++ oc get projects logger.go:42: 14:15:44 | demand-backup/6-check-password-leak | error: the server doesn't have a resource type "projects" logger.go:42: 14:15:44 | demand-backup/6-check-password-leak | +++ kubectl get nodes logger.go:42: 14:15:44 | demand-backup/6-check-password-leak | +++ grep '^minikube' logger.go:42: 14:15:45 | demand-backup/6-check-password-leak | ++++ pwd logger.go:42: 14:15:45 | demand-backup/6-check-password-leak | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup logger.go:42: 14:15:45 | demand-backup/6-check-password-leak | ++ test_name=demand-backup logger.go:42: 14:15:45 | demand-backup/6-check-password-leak | + check_passwords_leak logger.go:42: 14:15:45 | 
demand-backup/6-check-password-leak | ++ kubectl get secrets -o json logger.go:42: 14:15:45 | demand-backup/6-check-password-leak | ++ jq -r '.items[].data | to_entries | .[] | select(.key | (endswith(".crt") or endswith(".key") or endswith(".pub") or endswith(".pem") or endswith(".p12")) | not) | .value' logger.go:42: 14:15:45 | demand-backup/6-check-password-leak | + local secrets= logger.go:42: 14:15:45 | demand-backup/6-check-password-leak | + local 'passwords= ' logger.go:42: 14:15:45 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pods -o name logger.go:42: 14:15:45 | demand-backup/6-check-password-leak | ++ awk -F / '{print $2}' logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | + local 'pods=demand-backup-haproxy-0 logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | demand-backup-haproxy-1 logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | demand-backup-haproxy-2 logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | demand-backup-mysql-0 logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | demand-backup-mysql-1 logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | demand-backup-mysql-2 logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | demand-backup-orc-0 logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | demand-backup-orc-1 logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | demand-backup-orc-2 logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | minio-service-6f47578c5b-snpfm logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | mysql-client logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | percona-server-mysql-operator-6b56d66f99-fn9ww logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | xb-demand-backup-minio-minio-kdw58' logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | + collect_logs kuttl-test-hardy-killdeer logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | + NS=kuttl-test-hardy-killdeer logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 14:15:46 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-haproxy-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:15:47 | demand-backup/6-check-password-leak | + local 'containers=haproxy mysql-monit' logger.go:42: 14:15:47 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:15:47 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-haproxy-0 -c haproxy logger.go:42: 14:15:48 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-haproxy-0-haproxy.txt logger.go:42: 14:15:48 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-haproxy-0-haproxy.txt logger.go:42: 14:15:48 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:15:48 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-haproxy-0 -c mysql-monit logger.go:42: 14:15:49 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-haproxy-0-mysql-monit.txt logger.go:42: 14:15:49 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-haproxy-0-mysql-monit.txt logger.go:42: 14:15:49 | demand-backup/6-check-password-leak | + echo logger.go:42: 
14:15:49 | demand-backup/6-check-password-leak | logger.go:42: 14:15:49 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 14:15:49 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-haproxy-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:15:50 | demand-backup/6-check-password-leak | + local 'containers=haproxy mysql-monit' logger.go:42: 14:15:50 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:15:50 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-haproxy-1 -c haproxy logger.go:42: 14:15:51 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-haproxy-1-haproxy.txt logger.go:42: 14:15:51 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-haproxy-1-haproxy.txt logger.go:42: 14:15:51 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:15:51 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-haproxy-1 -c mysql-monit logger.go:42: 14:15:52 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-haproxy-1-mysql-monit.txt logger.go:42: 14:15:52 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-haproxy-1-mysql-monit.txt logger.go:42: 14:15:52 | demand-backup/6-check-password-leak | + echo logger.go:42: 14:15:52 | demand-backup/6-check-password-leak | logger.go:42: 14:15:52 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 14:15:52 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-haproxy-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:15:53 | demand-backup/6-check-password-leak | + local 'containers=haproxy mysql-monit' logger.go:42: 14:15:53 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:15:53 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-haproxy-2 -c haproxy logger.go:42: 14:15:54 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-haproxy-2-haproxy.txt logger.go:42: 14:15:54 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-haproxy-2-haproxy.txt logger.go:42: 14:15:54 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:15:54 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-haproxy-2 -c mysql-monit logger.go:42: 14:15:56 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-haproxy-2-mysql-monit.txt logger.go:42: 14:15:56 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-haproxy-2-mysql-monit.txt logger.go:42: 14:15:56 | demand-backup/6-check-password-leak | + echo logger.go:42: 14:15:56 | demand-backup/6-check-password-leak | logger.go:42: 14:15:56 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 14:15:56 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-mysql-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:15:57 | demand-backup/6-check-password-leak | + local 'containers=mysql xtrabackup pt-heartbeat' 
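The repetition above and below is check_passwords_leak at work: it first collects every secret value that is not a TLS artifact, then collect_logs saves each container's log under ${TEMP_DIR} so the decoded values can be searched for. A condensed reconstruction from the trace follows; the base64 decoding and the final per-password grep are assumptions (this run produced an empty secret list, so only the log collection is visible in the trace), and the real helpers live in e2e-tests/functions.

    check_passwords_leak() {
        local secrets
        secrets=$(kubectl get secrets -o json | jq -r '.items[].data | to_entries | .[]
            | select(.key | (endswith(".crt") or endswith(".key") or endswith(".pub")
                             or endswith(".pem") or endswith(".p12")) | not) | .value')
        local passwords
        passwords=$(for v in $secrets; do echo "$v" | base64 -d; echo; done)   # assumed decoding step
        local pods
        pods=$(kubectl -n "${NAMESPACE}" get pods -o name | awk -F / '{print $2}')
        collect_logs "${NAMESPACE}"
    }

    collect_logs() {
        local NS=$1
        for p in $pods; do
            local containers
            containers=$(kubectl -n "$NS" get pod "$p" -o 'jsonpath={.spec.containers[*].name}')
            for c in $containers; do
                kubectl -n "$NS" logs "$p" -c "$c" > "${TEMP_DIR}/logs_output-$p-$c.txt"
                echo "logs saved in: ${TEMP_DIR}/logs_output-$p-$c.txt"
                # assumed: each password in $passwords is grepped against the saved file here
            done
            echo
        done
    }

Every pod in the namespace gets the same treatment, including the minio-service, mysql-client, operator, and xtrabackup job pods listed above.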
logger.go:42: 14:15:57 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:15:57 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-0 -c mysql logger.go:42: 14:15:58 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-0-mysql.txt logger.go:42: 14:15:58 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-0-mysql.txt logger.go:42: 14:15:58 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:15:58 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-0 -c xtrabackup logger.go:42: 14:15:59 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-0-xtrabackup.txt logger.go:42: 14:15:59 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-0-xtrabackup.txt logger.go:42: 14:15:59 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:15:59 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-0 -c pt-heartbeat logger.go:42: 14:15:59 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-0-pt-heartbeat.txt logger.go:42: 14:15:59 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-0-pt-heartbeat.txt logger.go:42: 14:15:59 | demand-backup/6-check-password-leak | + echo logger.go:42: 14:15:59 | demand-backup/6-check-password-leak | logger.go:42: 14:15:59 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 14:15:59 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-mysql-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:16:00 | demand-backup/6-check-password-leak | + local 'containers=mysql xtrabackup pt-heartbeat' logger.go:42: 14:16:00 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:00 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-1 -c mysql logger.go:42: 14:16:01 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-1-mysql.txt logger.go:42: 14:16:01 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-1-mysql.txt logger.go:42: 14:16:01 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:01 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-1 -c xtrabackup logger.go:42: 14:16:02 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-1-xtrabackup.txt logger.go:42: 14:16:02 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-1-xtrabackup.txt logger.go:42: 14:16:02 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:02 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-1 -c pt-heartbeat logger.go:42: 14:16:03 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-1-pt-heartbeat.txt logger.go:42: 14:16:03 | 
demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-1-pt-heartbeat.txt logger.go:42: 14:16:03 | demand-backup/6-check-password-leak | + echo logger.go:42: 14:16:03 | demand-backup/6-check-password-leak | logger.go:42: 14:16:03 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 14:16:03 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-mysql-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:16:04 | demand-backup/6-check-password-leak | + local 'containers=mysql xtrabackup pt-heartbeat' logger.go:42: 14:16:04 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:04 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-2 -c mysql logger.go:42: 14:16:05 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-2-mysql.txt logger.go:42: 14:16:05 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-2-mysql.txt logger.go:42: 14:16:05 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:05 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-2 -c xtrabackup logger.go:42: 14:16:06 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-2-xtrabackup.txt logger.go:42: 14:16:06 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-2-xtrabackup.txt logger.go:42: 14:16:06 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:06 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-2 -c pt-heartbeat logger.go:42: 14:16:07 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-2-pt-heartbeat.txt logger.go:42: 14:16:07 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-mysql-2-pt-heartbeat.txt logger.go:42: 14:16:07 | demand-backup/6-check-password-leak | + echo logger.go:42: 14:16:07 | demand-backup/6-check-password-leak | logger.go:42: 14:16:07 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 14:16:07 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-orc-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:16:08 | demand-backup/6-check-password-leak | + local 'containers=orc mysql-monit' logger.go:42: 14:16:08 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:08 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-orc-0 -c orc logger.go:42: 14:16:09 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-orc-0-orc.txt logger.go:42: 14:16:09 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-orc-0-orc.txt logger.go:42: 14:16:09 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:09 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-orc-0 -c mysql-monit logger.go:42: 14:16:10 | demand-backup/6-check-password-leak | + echo logs saved in: 
/tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-orc-0-mysql-monit.txt logger.go:42: 14:16:10 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-orc-0-mysql-monit.txt logger.go:42: 14:16:10 | demand-backup/6-check-password-leak | + echo logger.go:42: 14:16:10 | demand-backup/6-check-password-leak | logger.go:42: 14:16:10 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 14:16:10 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-orc-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:16:11 | demand-backup/6-check-password-leak | + local 'containers=orc mysql-monit' logger.go:42: 14:16:11 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:11 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-orc-1 -c orc logger.go:42: 14:16:12 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-orc-1-orc.txt logger.go:42: 14:16:12 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-orc-1-orc.txt logger.go:42: 14:16:12 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:12 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-orc-1 -c mysql-monit logger.go:42: 14:16:13 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-orc-1-mysql-monit.txt logger.go:42: 14:16:13 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-orc-1-mysql-monit.txt logger.go:42: 14:16:13 | demand-backup/6-check-password-leak | + echo logger.go:42: 14:16:13 | demand-backup/6-check-password-leak | logger.go:42: 14:16:13 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 14:16:13 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-orc-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:16:14 | demand-backup/6-check-password-leak | + local 'containers=orc mysql-monit' logger.go:42: 14:16:14 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:14 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-orc-2 -c orc logger.go:42: 14:16:15 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-orc-2-orc.txt logger.go:42: 14:16:15 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-orc-2-orc.txt logger.go:42: 14:16:15 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:15 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-orc-2 -c mysql-monit logger.go:42: 14:16:16 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-orc-2-mysql-monit.txt logger.go:42: 14:16:16 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-demand-backup-orc-2-mysql-monit.txt logger.go:42: 14:16:16 | demand-backup/6-check-password-leak | + echo logger.go:42: 14:16:16 | demand-backup/6-check-password-leak | logger.go:42: 14:16:16 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 14:16:16 | demand-backup/6-check-password-leak | ++ kubectl -n 
kuttl-test-hardy-killdeer get pod minio-service-6f47578c5b-snpfm -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:16:17 | demand-backup/6-check-password-leak | + local containers=minio logger.go:42: 14:16:17 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:17 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs minio-service-6f47578c5b-snpfm -c minio logger.go:42: 14:16:17 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-minio-service-6f47578c5b-snpfm-minio.txt logger.go:42: 14:16:17 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-minio-service-6f47578c5b-snpfm-minio.txt logger.go:42: 14:16:17 | demand-backup/6-check-password-leak | + echo logger.go:42: 14:16:17 | demand-backup/6-check-password-leak | logger.go:42: 14:16:17 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 14:16:17 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod mysql-client -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:16:18 | demand-backup/6-check-password-leak | + local containers=mysql-client logger.go:42: 14:16:18 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:18 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs mysql-client -c mysql-client logger.go:42: 14:16:19 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-mysql-client-mysql-client.txt logger.go:42: 14:16:19 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-mysql-client-mysql-client.txt logger.go:42: 14:16:19 | demand-backup/6-check-password-leak | + echo logger.go:42: 14:16:19 | demand-backup/6-check-password-leak | logger.go:42: 14:16:19 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 14:16:19 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod percona-server-mysql-operator-6b56d66f99-fn9ww -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:16:20 | demand-backup/6-check-password-leak | + local containers=manager logger.go:42: 14:16:20 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:20 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs percona-server-mysql-operator-6b56d66f99-fn9ww -c manager logger.go:42: 14:16:21 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-percona-server-mysql-operator-6b56d66f99-fn9ww-manager.txt logger.go:42: 14:16:21 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-percona-server-mysql-operator-6b56d66f99-fn9ww-manager.txt logger.go:42: 14:16:21 | demand-backup/6-check-password-leak | + echo logger.go:42: 14:16:21 | demand-backup/6-check-password-leak | logger.go:42: 14:16:21 | demand-backup/6-check-password-leak | + for p in '$pods' logger.go:42: 14:16:21 | demand-backup/6-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod xb-demand-backup-minio-minio-kdw58 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:16:22 | demand-backup/6-check-password-leak | + local containers=xtrabackup logger.go:42: 14:16:22 | demand-backup/6-check-password-leak | + for c in '$containers' logger.go:42: 14:16:22 | demand-backup/6-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs xb-demand-backup-minio-minio-kdw58 -c 
xtrabackup logger.go:42: 14:16:23 | demand-backup/6-check-password-leak | + echo logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-xb-demand-backup-minio-minio-kdw58-xtrabackup.txt logger.go:42: 14:16:23 | demand-backup/6-check-password-leak | logs saved in: /tmp/tmp.c3nyjK7VRj/logs_output-xb-demand-backup-minio-minio-kdw58-xtrabackup.txt logger.go:42: 14:16:23 | demand-backup/6-check-password-leak | + echo logger.go:42: 14:16:23 | demand-backup/6-check-password-leak | logger.go:42: 14:16:23 | demand-backup/6-check-password-leak | + '[' -n '' ']' logger.go:42: 14:16:24 | demand-backup/6-check-password-leak | test step completed 6-check-password-leak logger.go:42: 14:16:24 | demand-backup/7-delete-data | starting test step 7-delete-data logger.go:42: 14:16:24 | demand-backup/7-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 04-delete-data-minio-${i} --from-literal=data="${data}" done] logger.go:42: 14:16:24 | demand-backup/7-delete-data | + source ../../functions logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ realpath ../../.. logger.go:42: 14:16:24 | demand-backup/7-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:16:24 | demand-backup/7-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:16:24 | demand-backup/7-delete-data | ++++ mktemp -d logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export TEMP_DIR=/tmp/tmp.urlh7cNaBK logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ TEMP_DIR=/tmp/tmp.urlh7cNaBK logger.go:42: 14:16:24 | demand-backup/7-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export GIT_BRANCH=PR-424 logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ GIT_BRANCH=PR-424 logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export VERSION=PR-424-70568ae logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ VERSION=PR-424-70568ae logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:16:24 | 
demand-backup/7-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ export IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:16:24 | demand-backup/7-delete-data | ++++ which gdate logger.go:42: 14:16:24 | demand-backup/7-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 14:16:24 | demand-backup/7-delete-data | ++++ which date logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ date=/usr/bin/date logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ command -v oc logger.go:42: 14:16:24 | demand-backup/7-delete-data | +++ oc get projects logger.go:42: 14:16:29 | demand-backup/7-delete-data | error: the server doesn't have a resource type "projects" logger.go:42: 14:16:29 | demand-backup/7-delete-data | +++ grep '^minikube' logger.go:42: 14:16:29 | demand-backup/7-delete-data | +++ kubectl get nodes logger.go:42: 14:16:30 | demand-backup/7-delete-data | ++++ pwd logger.go:42: 14:16:30 | demand-backup/7-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup logger.go:42: 14:16:30 | 
demand-backup/7-delete-data | ++ test_name=demand-backup logger.go:42: 14:16:30 | demand-backup/7-delete-data | +++ get_cluster_name logger.go:42: 14:16:30 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 14:16:31 | demand-backup/7-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 14:16:31 | demand-backup/7-delete-data | ++ local cluster=demand-backup logger.go:42: 14:16:31 | demand-backup/7-delete-data | ++ echo demand-backup-haproxy logger.go:42: 14:16:31 | demand-backup/7-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 14:16:31 | demand-backup/7-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 14:16:31 | demand-backup/7-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 14:16:31 | demand-backup/7-delete-data | + local pod= logger.go:42: 14:16:31 | demand-backup/7-delete-data | ++ get_client_pod logger.go:42: 14:16:31 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:16:32 | demand-backup/7-delete-data | + client_pod=mysql-client logger.go:42: 14:16:32 | demand-backup/7-delete-data | + wait_pod mysql-client logger.go:42: 14:16:32 | demand-backup/7-delete-data | + local pod=mysql-client logger.go:42: 14:16:32 | demand-backup/7-delete-data | + set +o xtrace logger.go:42: 14:16:32 | demand-backup/7-delete-data | mysql-clienttrue logger.go:42: 14:16:32 | demand-backup/7-delete-data | + sed -e 's/mysql: //' logger.go:42: 14:16:32 | demand-backup/7-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' 
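Every SQL statement in these steps goes through run_mysql, whose trace straddles this point: resolve the single mysql-client pod by its name=mysql-client selector, wait for it, exec the statement through mysql -sN, and scrub the client noise so the output is machine-comparable. A sketch reconstructed from the trace (the wait_pod internals are not shown in this log):

    run_mysql() {
        local command="$1"   # e.g. "TRUNCATE TABLE myDB.myTable"
        local uri="$2"       # e.g. "-h demand-backup-haproxy -uroot -proot_password"
        local pod
        pod=$(kubectl -n "${NAMESPACE}" get pods --selector=name=mysql-client \
              -o 'jsonpath={.items[].metadata.name}')
        wait_pod "${pod}"
        kubectl -n "${NAMESPACE}" exec "${pod}" -- \
            bash -c "printf '%s\n' \"${command}\" | mysql -sN ${uri}" \
            | sed -e 's/mysql: //' \
            | grep -v 'Using a password on the command line interface can be insecure.'
    }

When the result set is empty the trailing grep exits non-zero, which would explain the bare ':' traced after each empty query below, presumably an '|| :' guard at the call site.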
logger.go:42: 14:16:32 | demand-backup/7-delete-data | + kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 14:16:34 | demand-backup/7-delete-data | + : logger.go:42: 14:16:34 | demand-backup/7-delete-data | ++ get_cluster_name logger.go:42: 14:16:34 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 14:16:35 | demand-backup/7-delete-data | + cluster_name=demand-backup logger.go:42: 14:16:35 | demand-backup/7-delete-data | + for i in 0 1 2 logger.go:42: 14:16:35 | demand-backup/7-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:16:35 | demand-backup/7-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:16:35 | demand-backup/7-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:16:35 | demand-backup/7-delete-data | ++ local pod= logger.go:42: 14:16:35 | demand-backup/7-delete-data | +++ get_client_pod logger.go:42: 14:16:35 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:16:35 | demand-backup/7-delete-data | ++ client_pod=mysql-client logger.go:42: 14:16:35 | demand-backup/7-delete-data | ++ wait_pod mysql-client logger.go:42: 14:16:35 | demand-backup/7-delete-data | ++ local pod=mysql-client logger.go:42: 14:16:35 | demand-backup/7-delete-data | ++ set +o xtrace logger.go:42: 14:16:36 | demand-backup/7-delete-data | mysql-clienttrue logger.go:42: 14:16:36 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:16:36 | demand-backup/7-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 14:16:36 | demand-backup/7-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 14:16:38 | demand-backup/7-delete-data | ++ : logger.go:42: 14:16:38 | demand-backup/7-delete-data | + data= logger.go:42: 14:16:38 | demand-backup/7-delete-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 04-delete-data-minio-0 --from-literal=data= logger.go:42: 14:16:38 | demand-backup/7-delete-data | configmap/04-delete-data-minio-0 created logger.go:42: 14:16:38 | demand-backup/7-delete-data | + for i in 0 1 2 logger.go:42: 14:16:38 | demand-backup/7-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:16:38 | demand-backup/7-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:16:38 | demand-backup/7-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:16:38 | demand-backup/7-delete-data | ++ local pod= logger.go:42: 14:16:38 | demand-backup/7-delete-data | +++ get_client_pod logger.go:42: 14:16:38 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:16:39 | demand-backup/7-delete-data | ++ client_pod=mysql-client logger.go:42: 14:16:39 | demand-backup/7-delete-data | ++ wait_pod mysql-client logger.go:42: 14:16:39 | demand-backup/7-delete-data | ++ local pod=mysql-client logger.go:42: 14:16:39 | demand-backup/7-delete-data | ++ set +o xtrace logger.go:42: 14:16:40 | demand-backup/7-delete-data | mysql-clienttrue logger.go:42: 14:16:40 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:16:40 | demand-backup/7-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 14:16:40 | demand-backup/7-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
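The 7-delete-data step script, quoted at the start of the step and traced piecemeal above and below, is short; reformatted for readability it is:

    cluster_name=$(get_cluster_name)
    run_mysql "TRUNCATE TABLE myDB.myTable" \
        "-h $(get_haproxy_svc "${cluster_name}") -uroot -proot_password"
    for i in 0 1 2; do
        # read the table back from each replica directly, bypassing HAProxy
        data=$(run_mysql "SELECT * FROM myDB.myTable" \
            "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password")
        # an empty result shows the TRUNCATE replicated to this pod; the step's
        # kuttl assert presumably checks these configmaps
        kubectl create configmap -n "${NAMESPACE}" "04-delete-data-minio-${i}" \
            --from-literal=data="${data}"
    done

All three 04-delete-data-minio-* configmaps end up created with data=, i.e. the table is empty on every replica before the restore.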
logger.go:42: 14:16:41 | demand-backup/7-delete-data | ++ : logger.go:42: 14:16:41 | demand-backup/7-delete-data | + data= logger.go:42: 14:16:41 | demand-backup/7-delete-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 04-delete-data-minio-1 --from-literal=data= logger.go:42: 14:16:42 | demand-backup/7-delete-data | configmap/04-delete-data-minio-1 created logger.go:42: 14:16:42 | demand-backup/7-delete-data | + for i in 0 1 2 logger.go:42: 14:16:42 | demand-backup/7-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:16:42 | demand-backup/7-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:16:42 | demand-backup/7-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:16:42 | demand-backup/7-delete-data | ++ local pod= logger.go:42: 14:16:42 | demand-backup/7-delete-data | +++ get_client_pod logger.go:42: 14:16:42 | demand-backup/7-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:16:43 | demand-backup/7-delete-data | ++ client_pod=mysql-client logger.go:42: 14:16:43 | demand-backup/7-delete-data | ++ wait_pod mysql-client logger.go:42: 14:16:43 | demand-backup/7-delete-data | ++ local pod=mysql-client logger.go:42: 14:16:43 | demand-backup/7-delete-data | ++ set +o xtrace logger.go:42: 14:16:43 | demand-backup/7-delete-data | mysql-clienttrue logger.go:42: 14:16:43 | demand-backup/7-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:16:43 | demand-backup/7-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 14:16:43 | demand-backup/7-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:16:45 | demand-backup/7-delete-data | ++ : logger.go:42: 14:16:45 | demand-backup/7-delete-data | + data= logger.go:42: 14:16:45 | demand-backup/7-delete-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 04-delete-data-minio-2 --from-literal=data= logger.go:42: 14:16:46 | demand-backup/7-delete-data | configmap/04-delete-data-minio-2 created logger.go:42: 14:16:47 | demand-backup/7-delete-data | test step completed 7-delete-data logger.go:42: 14:16:47 | demand-backup/8-restore-from-minio | starting test step 8-restore-from-minio logger.go:42: 14:16:49 | demand-backup/8-restore-from-minio | PerconaServerMySQLRestore:kuttl-test-hardy-killdeer/demand-backup-restore-minio created logger.go:42: 14:21:45 | demand-backup/8-restore-from-minio | test step completed 8-restore-from-minio logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | starting test step 9-check-password-leak logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | running command: [sh -c set -o errexit set -o xtrace source ../../functions check_passwords_leak] logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | + source ../../functions logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ realpath ../../.. 
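Steps 5 and 8 are driven by custom resources rather than shell: the log only records PerconaServerMySQLBackup demand-backup-minio being created (14:15:22) and, once the data was wiped, PerconaServerMySQLRestore demand-backup-restore-minio completing after roughly five minutes (14:16:49 to 14:21:45). The manifests themselves are not in this log; under the operator's v1alpha1 schema they would look roughly like this (the apiVersion and spec fields here are assumptions, not taken from the log):

    kubectl -n "${NAMESPACE}" apply -f - <<EOF
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLBackup
    metadata:
      name: demand-backup-minio
    spec:
      clusterName: demand-backup
      storageName: minio            # assumed: a backup storage defined in the cluster spec
    EOF

    kubectl -n "${NAMESPACE}" apply -f - <<EOF
    apiVersion: ps.percona.com/v1alpha1
    kind: PerconaServerMySQLRestore
    metadata:
      name: demand-backup-restore-minio
    spec:
      clusterName: demand-backup
      backupName: demand-backup-minio
    EOF

Step 9 below then repeats the same password-leak sweep as step 6, now with the xb-restore-demand-backup-restore-minio-7rc2b job pod included in the pod list.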
logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | ++++ mktemp -d logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export TEMP_DIR=/tmp/tmp.TCLRFHLchu logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ TEMP_DIR=/tmp/tmp.TCLRFHLchu logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export GIT_BRANCH=PR-424 logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ GIT_BRANCH=PR-424 logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export VERSION=PR-424-70568ae logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ VERSION=PR-424-70568ae logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:21:45 | demand-backup/9-check-password-leak 
| +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ export IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | ++++ which gdate logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | ++++ which date logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ date=/usr/bin/date logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ command -v oc logger.go:42: 14:21:45 | demand-backup/9-check-password-leak | +++ oc get projects logger.go:42: 14:21:51 | demand-backup/9-check-password-leak | error: the server doesn't have a resource type "projects" logger.go:42: 14:21:51 | demand-backup/9-check-password-leak | +++ grep '^minikube' logger.go:42: 14:21:51 | demand-backup/9-check-password-leak | +++ kubectl get nodes logger.go:42: 14:21:51 | demand-backup/9-check-password-leak | ++++ pwd logger.go:42: 14:21:51 | demand-backup/9-check-password-leak | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup logger.go:42: 14:21:51 | demand-backup/9-check-password-leak | ++ test_name=demand-backup logger.go:42: 14:21:51 | demand-backup/9-check-password-leak | + check_passwords_leak logger.go:42: 14:21:51 | demand-backup/9-check-password-leak | ++ kubectl get secrets -o json logger.go:42: 14:21:51 | demand-backup/9-check-password-leak | ++ jq -r '.items[].data | to_entries | .[] | select(.key | (endswith(".crt") or endswith(".key") or endswith(".pub") or endswith(".pem") or endswith(".p12")) | not) | .value' logger.go:42: 14:21:52 | demand-backup/9-check-password-leak | + local secrets= logger.go:42: 14:21:52 | demand-backup/9-check-password-leak | + local 'passwords= ' logger.go:42: 14:21:52 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pods -o name logger.go:42: 14:21:52 | demand-backup/9-check-password-leak | ++ awk -F / '{print $2}' logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | + local 'pods=demand-backup-haproxy-0 logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | 
demand-backup-haproxy-1 logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | demand-backup-haproxy-2 logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | demand-backup-mysql-0 logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | demand-backup-mysql-1 logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | demand-backup-mysql-2 logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | demand-backup-orc-0 logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | demand-backup-orc-1 logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | demand-backup-orc-2 logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | minio-service-6f47578c5b-snpfm logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | mysql-client logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | percona-server-mysql-operator-6b56d66f99-fn9ww logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | xb-demand-backup-minio-minio-kdw58 logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | xb-restore-demand-backup-restore-minio-7rc2b' logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | + collect_logs kuttl-test-hardy-killdeer logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | + NS=kuttl-test-hardy-killdeer logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 14:21:53 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-haproxy-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:21:54 | demand-backup/9-check-password-leak | + local 'containers=haproxy mysql-monit' logger.go:42: 14:21:54 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:21:54 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-haproxy-0 -c haproxy logger.go:42: 14:21:55 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-haproxy-0-haproxy.txt logger.go:42: 14:21:55 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-haproxy-0-haproxy.txt logger.go:42: 14:21:55 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:21:55 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-haproxy-0 -c mysql-monit logger.go:42: 14:21:56 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-haproxy-0-mysql-monit.txt logger.go:42: 14:21:56 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-haproxy-0-mysql-monit.txt logger.go:42: 14:21:56 | demand-backup/9-check-password-leak | + echo logger.go:42: 14:21:56 | demand-backup/9-check-password-leak | logger.go:42: 14:21:56 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 14:21:56 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-haproxy-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:21:57 | demand-backup/9-check-password-leak | + local 'containers=haproxy mysql-monit' logger.go:42: 14:21:57 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:21:57 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-haproxy-1 -c haproxy logger.go:42: 14:21:58 | demand-backup/9-check-password-leak | + echo logs saved in: 
/tmp/tmp.TCLRFHLchu/logs_output-demand-backup-haproxy-1-haproxy.txt logger.go:42: 14:21:58 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-haproxy-1-haproxy.txt logger.go:42: 14:21:58 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:21:58 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-haproxy-1 -c mysql-monit logger.go:42: 14:21:59 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-haproxy-1-mysql-monit.txt logger.go:42: 14:21:59 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-haproxy-1-mysql-monit.txt logger.go:42: 14:21:59 | demand-backup/9-check-password-leak | + echo logger.go:42: 14:21:59 | demand-backup/9-check-password-leak | logger.go:42: 14:21:59 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 14:21:59 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-haproxy-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:21:59 | demand-backup/9-check-password-leak | + local 'containers=haproxy mysql-monit' logger.go:42: 14:21:59 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:21:59 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-haproxy-2 -c haproxy logger.go:42: 14:22:00 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-haproxy-2-haproxy.txt logger.go:42: 14:22:00 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-haproxy-2-haproxy.txt logger.go:42: 14:22:00 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:00 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-haproxy-2 -c mysql-monit logger.go:42: 14:22:01 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-haproxy-2-mysql-monit.txt logger.go:42: 14:22:01 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-haproxy-2-mysql-monit.txt logger.go:42: 14:22:01 | demand-backup/9-check-password-leak | + echo logger.go:42: 14:22:01 | demand-backup/9-check-password-leak | logger.go:42: 14:22:01 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 14:22:01 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-mysql-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:22:02 | demand-backup/9-check-password-leak | + local 'containers=mysql xtrabackup pt-heartbeat' logger.go:42: 14:22:02 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:02 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-0 -c mysql logger.go:42: 14:22:03 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-0-mysql.txt logger.go:42: 14:22:03 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-0-mysql.txt logger.go:42: 14:22:03 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:03 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs 
demand-backup-mysql-0 -c xtrabackup logger.go:42: 14:22:04 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-0-xtrabackup.txt logger.go:42: 14:22:04 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-0-xtrabackup.txt logger.go:42: 14:22:04 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:04 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-0 -c pt-heartbeat logger.go:42: 14:22:05 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-0-pt-heartbeat.txt logger.go:42: 14:22:05 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-0-pt-heartbeat.txt logger.go:42: 14:22:05 | demand-backup/9-check-password-leak | + echo logger.go:42: 14:22:05 | demand-backup/9-check-password-leak | logger.go:42: 14:22:05 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 14:22:05 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-mysql-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:22:06 | demand-backup/9-check-password-leak | + local 'containers=mysql xtrabackup pt-heartbeat' logger.go:42: 14:22:06 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:06 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-1 -c mysql logger.go:42: 14:22:07 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-1-mysql.txt logger.go:42: 14:22:07 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-1-mysql.txt logger.go:42: 14:22:07 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:07 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-1 -c xtrabackup logger.go:42: 14:22:08 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-1-xtrabackup.txt logger.go:42: 14:22:08 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-1-xtrabackup.txt logger.go:42: 14:22:08 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:08 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-1 -c pt-heartbeat logger.go:42: 14:22:09 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-1-pt-heartbeat.txt logger.go:42: 14:22:09 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-1-pt-heartbeat.txt logger.go:42: 14:22:09 | demand-backup/9-check-password-leak | + echo logger.go:42: 14:22:09 | demand-backup/9-check-password-leak | logger.go:42: 14:22:09 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 14:22:09 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-mysql-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:22:10 | demand-backup/9-check-password-leak | + local 'containers=mysql xtrabackup pt-heartbeat' logger.go:42: 14:22:10 | demand-backup/9-check-password-leak | + for c in 
'$containers' logger.go:42: 14:22:10 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-2 -c mysql logger.go:42: 14:22:11 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-2-mysql.txt logger.go:42: 14:22:11 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-2-mysql.txt logger.go:42: 14:22:11 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:11 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-2 -c xtrabackup logger.go:42: 14:22:12 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-2-xtrabackup.txt logger.go:42: 14:22:12 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-2-xtrabackup.txt logger.go:42: 14:22:12 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:12 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-2 -c pt-heartbeat logger.go:42: 14:22:13 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-2-pt-heartbeat.txt logger.go:42: 14:22:13 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-mysql-2-pt-heartbeat.txt logger.go:42: 14:22:13 | demand-backup/9-check-password-leak | + echo logger.go:42: 14:22:13 | demand-backup/9-check-password-leak | logger.go:42: 14:22:13 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 14:22:13 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-orc-0 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:22:13 | demand-backup/9-check-password-leak | + local 'containers=orc mysql-monit' logger.go:42: 14:22:13 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:13 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-orc-0 -c orc logger.go:42: 14:22:14 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-orc-0-orc.txt logger.go:42: 14:22:14 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-orc-0-orc.txt logger.go:42: 14:22:14 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:14 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-orc-0 -c mysql-monit logger.go:42: 14:22:15 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-orc-0-mysql-monit.txt logger.go:42: 14:22:15 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-orc-0-mysql-monit.txt logger.go:42: 14:22:15 | demand-backup/9-check-password-leak | + echo logger.go:42: 14:22:15 | demand-backup/9-check-password-leak | logger.go:42: 14:22:15 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 14:22:15 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-orc-1 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:22:16 | demand-backup/9-check-password-leak | + local 'containers=orc mysql-monit' logger.go:42: 14:22:16 | 
demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:16 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-orc-1 -c orc logger.go:42: 14:22:17 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-orc-1-orc.txt logger.go:42: 14:22:17 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-orc-1-orc.txt logger.go:42: 14:22:17 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:17 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-orc-1 -c mysql-monit logger.go:42: 14:22:18 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-orc-1-mysql-monit.txt logger.go:42: 14:22:18 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-orc-1-mysql-monit.txt logger.go:42: 14:22:18 | demand-backup/9-check-password-leak | + echo logger.go:42: 14:22:18 | demand-backup/9-check-password-leak | logger.go:42: 14:22:18 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 14:22:18 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-orc-2 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:22:19 | demand-backup/9-check-password-leak | + local 'containers=orc mysql-monit' logger.go:42: 14:22:19 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:19 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-orc-2 -c orc logger.go:42: 14:22:20 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-orc-2-orc.txt logger.go:42: 14:22:20 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-orc-2-orc.txt logger.go:42: 14:22:20 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:20 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs demand-backup-orc-2 -c mysql-monit logger.go:42: 14:22:21 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-orc-2-mysql-monit.txt logger.go:42: 14:22:21 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-demand-backup-orc-2-mysql-monit.txt logger.go:42: 14:22:21 | demand-backup/9-check-password-leak | + echo logger.go:42: 14:22:21 | demand-backup/9-check-password-leak | logger.go:42: 14:22:21 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 14:22:21 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod minio-service-6f47578c5b-snpfm -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:22:22 | demand-backup/9-check-password-leak | + local containers=minio logger.go:42: 14:22:22 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:22 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs minio-service-6f47578c5b-snpfm -c minio logger.go:42: 14:22:23 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-minio-service-6f47578c5b-snpfm-minio.txt logger.go:42: 14:22:23 | demand-backup/9-check-password-leak | logs saved in: 
/tmp/tmp.TCLRFHLchu/logs_output-minio-service-6f47578c5b-snpfm-minio.txt logger.go:42: 14:22:23 | demand-backup/9-check-password-leak | + echo logger.go:42: 14:22:23 | demand-backup/9-check-password-leak | logger.go:42: 14:22:23 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 14:22:23 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod mysql-client -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:22:23 | demand-backup/9-check-password-leak | + local containers=mysql-client logger.go:42: 14:22:23 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:23 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs mysql-client -c mysql-client logger.go:42: 14:22:24 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-mysql-client-mysql-client.txt logger.go:42: 14:22:24 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-mysql-client-mysql-client.txt logger.go:42: 14:22:24 | demand-backup/9-check-password-leak | + echo logger.go:42: 14:22:24 | demand-backup/9-check-password-leak | logger.go:42: 14:22:24 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 14:22:24 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod percona-server-mysql-operator-6b56d66f99-fn9ww -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:22:25 | demand-backup/9-check-password-leak | + local containers=manager logger.go:42: 14:22:25 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:25 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs percona-server-mysql-operator-6b56d66f99-fn9ww -c manager logger.go:42: 14:22:26 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-percona-server-mysql-operator-6b56d66f99-fn9ww-manager.txt logger.go:42: 14:22:26 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-percona-server-mysql-operator-6b56d66f99-fn9ww-manager.txt logger.go:42: 14:22:26 | demand-backup/9-check-password-leak | + echo logger.go:42: 14:22:26 | demand-backup/9-check-password-leak | logger.go:42: 14:22:26 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 14:22:26 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod xb-demand-backup-minio-minio-kdw58 -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:22:27 | demand-backup/9-check-password-leak | + local containers=xtrabackup logger.go:42: 14:22:27 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:27 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs xb-demand-backup-minio-minio-kdw58 -c xtrabackup logger.go:42: 14:22:28 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-xb-demand-backup-minio-minio-kdw58-xtrabackup.txt logger.go:42: 14:22:28 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-xb-demand-backup-minio-minio-kdw58-xtrabackup.txt logger.go:42: 14:22:28 | demand-backup/9-check-password-leak | + echo logger.go:42: 14:22:28 | demand-backup/9-check-password-leak | logger.go:42: 14:22:28 | demand-backup/9-check-password-leak | + for p in '$pods' logger.go:42: 14:22:28 | demand-backup/9-check-password-leak | ++ kubectl -n kuttl-test-hardy-killdeer get pod 
xb-restore-demand-backup-restore-minio-7rc2b -o 'jsonpath={.spec.containers[*].name}' logger.go:42: 14:22:29 | demand-backup/9-check-password-leak | + local containers=xtrabackup logger.go:42: 14:22:29 | demand-backup/9-check-password-leak | + for c in '$containers' logger.go:42: 14:22:29 | demand-backup/9-check-password-leak | + kubectl -n kuttl-test-hardy-killdeer logs xb-restore-demand-backup-restore-minio-7rc2b -c xtrabackup logger.go:42: 14:22:30 | demand-backup/9-check-password-leak | + echo logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-xb-restore-demand-backup-restore-minio-7rc2b-xtrabackup.txt logger.go:42: 14:22:30 | demand-backup/9-check-password-leak | logs saved in: /tmp/tmp.TCLRFHLchu/logs_output-xb-restore-demand-backup-restore-minio-7rc2b-xtrabackup.txt logger.go:42: 14:22:30 | demand-backup/9-check-password-leak | + echo logger.go:42: 14:22:30 | demand-backup/9-check-password-leak | logger.go:42: 14:22:30 | demand-backup/9-check-password-leak | + '[' -n '' ']' logger.go:42: 14:22:31 | demand-backup/9-check-password-leak | test step completed 9-check-password-leak logger.go:42: 14:22:31 | demand-backup/10-read-data | starting test step 10-read-data logger.go:42: 14:22:31 | demand-backup/10-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-read-data-minio-${i} --from-literal=data="${data}" done] logger.go:42: 14:22:31 | demand-backup/10-read-data | + source ../../functions logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ realpath ../../.. logger.go:42: 14:22:31 | demand-backup/10-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:22:31 | demand-backup/10-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:22:31 | demand-backup/10-read-data | ++++ mktemp -d logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export TEMP_DIR=/tmp/tmp.TerSx1noSk logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ TEMP_DIR=/tmp/tmp.TerSx1noSk logger.go:42: 14:22:31 | demand-backup/10-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export GIT_BRANCH=PR-424 logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ GIT_BRANCH=PR-424 logger.go:42: 14:22:31 | 
demand-backup/10-read-data | +++ export VERSION=PR-424-70568ae logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ VERSION=PR-424-70568ae logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ export IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:22:31 | demand-backup/10-read-data | ++++ which gdate logger.go:42: 14:22:31 | demand-backup/10-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 14:22:31 | demand-backup/10-read-data | ++++ which date logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ date=/usr/bin/date logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ command -v oc logger.go:42: 14:22:31 | demand-backup/10-read-data | +++ oc get projects logger.go:42: 14:22:37 | demand-backup/10-read-data | error: the server doesn't have a resource type "projects" logger.go:42: 14:22:37 | demand-backup/10-read-data | +++ grep '^minikube' logger.go:42: 14:22:37 | demand-backup/10-read-data | +++ 
kubectl get nodes logger.go:42: 14:22:38 | demand-backup/10-read-data | ++++ pwd logger.go:42: 14:22:38 | demand-backup/10-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup logger.go:42: 14:22:38 | demand-backup/10-read-data | ++ test_name=demand-backup logger.go:42: 14:22:38 | demand-backup/10-read-data | ++ get_cluster_name logger.go:42: 14:22:38 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 14:22:38 | demand-backup/10-read-data | + cluster_name=demand-backup logger.go:42: 14:22:38 | demand-backup/10-read-data | + for i in 0 1 2 logger.go:42: 14:22:38 | demand-backup/10-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:22:38 | demand-backup/10-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:22:38 | demand-backup/10-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:22:38 | demand-backup/10-read-data | ++ local pod= logger.go:42: 14:22:38 | demand-backup/10-read-data | +++ get_client_pod logger.go:42: 14:22:38 | demand-backup/10-read-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:22:39 | demand-backup/10-read-data | ++ client_pod=mysql-client logger.go:42: 14:22:39 | demand-backup/10-read-data | ++ wait_pod mysql-client logger.go:42: 14:22:39 | demand-backup/10-read-data | ++ local pod=mysql-client logger.go:42: 14:22:39 | demand-backup/10-read-data | ++ set +o xtrace logger.go:42: 14:22:40 | demand-backup/10-read-data | mysql-clienttrue logger.go:42: 14:22:40 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:22:40 | demand-backup/10-read-data | ++ sed -e 's/mysql: //' logger.go:42: 14:22:40 | demand-backup/10-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
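The xtrace above lays out the run_mysql helper end to end: it locates the long-running mysql-client pod by label, waits for it, then pipes the statement into the mysql CLI inside that pod and strips the client's password warning. A minimal reconstruction from the trace follows (quoting simplified; the trailing `|| :` guard is inferred from the `+ :` lines that appear later in the log after statements that return no rows):

# Reconstructed from the xtrace output above; NAMESPACE is the variable the
# step scripts already use for kubectl -n. Quoting is simplified for clarity.
run_mysql() {
    local command="$1"   # e.g. "SELECT * FROM myDB.myTable"
    local uri="$2"       # e.g. "-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password"
    local pod
    # Locate the long-running client pod by its label.
    pod=$(kubectl -n "${NAMESPACE}" get pods --selector=name=mysql-client \
        -o 'jsonpath={.items[].metadata.name}')
    wait_pod "${pod}"
    # Feed the statement to the mysql CLI inside the client pod, then strip
    # the client noise so callers capture clean row data; '|| :' keeps an
    # empty result set from tripping 'set -o errexit'.
    kubectl -n "${NAMESPACE}" exec "${pod}" -- bash -c \
        "printf '%s\n' \"${command}\" | mysql -sN ${uri}" \
        | sed -e 's/mysql: //' \
        | grep -v 'Using a password on the command line interface can be insecure.' || :
}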
logger.go:42: 14:22:41 | demand-backup/10-read-data | + data=100500 logger.go:42: 14:22:41 | demand-backup/10-read-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 06-read-data-minio-0 --from-literal=data=100500 logger.go:42: 14:22:42 | demand-backup/10-read-data | configmap/06-read-data-minio-0 created logger.go:42: 14:22:42 | demand-backup/10-read-data | + for i in 0 1 2 logger.go:42: 14:22:42 | demand-backup/10-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:22:42 | demand-backup/10-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:22:42 | demand-backup/10-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:22:42 | demand-backup/10-read-data | ++ local pod= logger.go:42: 14:22:42 | demand-backup/10-read-data | +++ get_client_pod logger.go:42: 14:22:42 | demand-backup/10-read-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:22:43 | demand-backup/10-read-data | ++ client_pod=mysql-client logger.go:42: 14:22:43 | demand-backup/10-read-data | ++ wait_pod mysql-client logger.go:42: 14:22:43 | demand-backup/10-read-data | ++ local pod=mysql-client logger.go:42: 14:22:43 | demand-backup/10-read-data | ++ set +o xtrace logger.go:42: 14:22:44 | demand-backup/10-read-data | mysql-clienttrue logger.go:42: 14:22:44 | demand-backup/10-read-data | ++ sed -e 's/mysql: //' logger.go:42: 14:22:44 | demand-backup/10-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:22:44 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:22:45 | demand-backup/10-read-data | + data=100500 logger.go:42: 14:22:45 | demand-backup/10-read-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 06-read-data-minio-1 --from-literal=data=100500 logger.go:42: 14:22:46 | demand-backup/10-read-data | configmap/06-read-data-minio-1 created logger.go:42: 14:22:46 | demand-backup/10-read-data | + for i in 0 1 2 logger.go:42: 14:22:46 | demand-backup/10-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:22:46 | demand-backup/10-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:22:46 | demand-backup/10-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:22:46 | demand-backup/10-read-data | ++ local pod= logger.go:42: 14:22:46 | demand-backup/10-read-data | +++ get_client_pod logger.go:42: 14:22:46 | demand-backup/10-read-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:22:47 | demand-backup/10-read-data | ++ client_pod=mysql-client logger.go:42: 14:22:47 | demand-backup/10-read-data | ++ wait_pod mysql-client logger.go:42: 14:22:47 | demand-backup/10-read-data | ++ local pod=mysql-client logger.go:42: 14:22:47 | demand-backup/10-read-data | ++ set +o xtrace logger.go:42: 14:22:47 | demand-backup/10-read-data | mysql-clienttrue logger.go:42: 14:22:47 | demand-backup/10-read-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- 
bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:22:47 | demand-backup/10-read-data | ++ sed -e 's/mysql: //' logger.go:42: 14:22:47 | demand-backup/10-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:22:49 | demand-backup/10-read-data | + data=100500 logger.go:42: 14:22:49 | demand-backup/10-read-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 06-read-data-minio-2 --from-literal=data=100500 logger.go:42: 14:22:49 | demand-backup/10-read-data | configmap/06-read-data-minio-2 created logger.go:42: 14:22:51 | demand-backup/10-read-data | test step completed 10-read-data logger.go:42: 14:22:51 | demand-backup/11-delete-data | starting test step 11-delete-data logger.go:42: 14:22:51 | demand-backup/11-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2 do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 04-delete-data-minio-backup-source-${i} --from-literal=data="${data}" done] logger.go:42: 14:22:51 | demand-backup/11-delete-data | + source ../../functions logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ realpath ../../.. logger.go:42: 14:22:51 | demand-backup/11-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:22:51 | demand-backup/11-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:22:51 | demand-backup/11-delete-data | ++++ mktemp -d logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export TEMP_DIR=/tmp/tmp.wTKjmn4d7Y logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ TEMP_DIR=/tmp/tmp.wTKjmn4d7Y logger.go:42: 14:22:51 | demand-backup/11-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export GIT_BRANCH=PR-424 logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ GIT_BRANCH=PR-424 logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export VERSION=PR-424-70568ae logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ VERSION=PR-424-70568ae logger.go:42: 14:22:51 | 
demand-backup/11-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ export IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:22:51 | demand-backup/11-delete-data | ++++ which gdate logger.go:42: 14:22:51 | demand-backup/11-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 14:22:51 | demand-backup/11-delete-data | ++++ which date logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ date=/usr/bin/date logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ command -v oc logger.go:42: 14:22:51 | demand-backup/11-delete-data | +++ oc get projects logger.go:42: 14:22:57 | demand-backup/11-delete-data | error: the server doesn't have a resource type "projects" logger.go:42: 14:22:57 | demand-backup/11-delete-data | +++ kubectl get nodes logger.go:42: 14:22:57 | demand-backup/11-delete-data | +++ grep '^minikube' logger.go:42: 14:22:58 | demand-backup/11-delete-data | ++++ pwd logger.go:42: 14:22:58 
| demand-backup/11-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup logger.go:42: 14:22:58 | demand-backup/11-delete-data | ++ test_name=demand-backup logger.go:42: 14:22:58 | demand-backup/11-delete-data | +++ get_cluster_name logger.go:42: 14:22:58 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 14:22:58 | demand-backup/11-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 14:22:58 | demand-backup/11-delete-data | ++ local cluster=demand-backup logger.go:42: 14:22:58 | demand-backup/11-delete-data | ++ echo demand-backup-haproxy logger.go:42: 14:22:58 | demand-backup/11-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 14:22:58 | demand-backup/11-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 14:22:58 | demand-backup/11-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 14:22:58 | demand-backup/11-delete-data | + local pod= logger.go:42: 14:22:58 | demand-backup/11-delete-data | ++ get_client_pod logger.go:42: 14:22:58 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:22:59 | demand-backup/11-delete-data | + client_pod=mysql-client logger.go:42: 14:22:59 | demand-backup/11-delete-data | + wait_pod mysql-client logger.go:42: 14:22:59 | demand-backup/11-delete-data | + local pod=mysql-client logger.go:42: 14:22:59 | demand-backup/11-delete-data | + set +o xtrace logger.go:42: 14:23:00 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 14:23:00 | demand-backup/11-delete-data | + sed -e 's/mysql: //' logger.go:42: 14:23:00 | demand-backup/11-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' 
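One piece the trace deliberately hides is wait_pod, which runs `set +o xtrace` before doing its work; all that surfaces is output like `mysql-clienttrue`. That output is consistent with the pod name being echoed without a newline, followed by the pod's Ready status, so the following is a plausible sketch only, not the actual body:

# Hypothetical wait_pod; the real implementation is hidden by 'set +o xtrace',
# so treat every line here as an assumption consistent with the visible output.
wait_pod() {
    local pod="$1"
    echo -n "${pod}"
    # Poll the first container's readiness until it reports true.
    until [[ $(kubectl -n "${NAMESPACE}" get pod "${pod}" \
        -o 'jsonpath={.status.containerStatuses[0].ready}') == "true" ]]; do
        sleep 1
    done
    echo "true"
}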
logger.go:42: 14:23:00 | demand-backup/11-delete-data | + kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 14:23:01 | demand-backup/11-delete-data | + : logger.go:42: 14:23:01 | demand-backup/11-delete-data | ++ get_cluster_name logger.go:42: 14:23:01 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 14:23:02 | demand-backup/11-delete-data | + cluster_name=demand-backup logger.go:42: 14:23:02 | demand-backup/11-delete-data | + for i in 0 1 2 logger.go:42: 14:23:02 | demand-backup/11-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:23:02 | demand-backup/11-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:23:02 | demand-backup/11-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:23:02 | demand-backup/11-delete-data | ++ local pod= logger.go:42: 14:23:02 | demand-backup/11-delete-data | +++ get_client_pod logger.go:42: 14:23:02 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:23:03 | demand-backup/11-delete-data | ++ client_pod=mysql-client logger.go:42: 14:23:03 | demand-backup/11-delete-data | ++ wait_pod mysql-client logger.go:42: 14:23:03 | demand-backup/11-delete-data | ++ local pod=mysql-client logger.go:42: 14:23:03 | demand-backup/11-delete-data | ++ set +o xtrace logger.go:42: 14:23:04 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 14:23:04 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:23:04 | demand-backup/11-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 14:23:04 | demand-backup/11-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
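The bare `+ :` after the TRUNCATE is worth decoding: the step scripts run under `set -o errexit`, and grep exits non-zero when it reads no lines at all, so each run_mysql pipeline appears to end in `|| :` to keep statements that return no rows from aborting the step. A two-line demonstration of the failure mode being guarded against (the warning string is the same one filtered above):

set -o errexit
# printf '' emits no lines, so grep selects nothing and exits 1;
# without '|| :' this assignment would kill the script under errexit.
data=$(printf '' | grep -v 'Using a password on the command line interface can be insecure.' || :)
echo "data='${data}'"   # prints data='' and execution continues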
logger.go:42: 14:23:05 | demand-backup/11-delete-data | ++ : logger.go:42: 14:23:05 | demand-backup/11-delete-data | + data= logger.go:42: 14:23:05 | demand-backup/11-delete-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 04-delete-data-minio-backup-source-0 --from-literal=data= logger.go:42: 14:23:06 | demand-backup/11-delete-data | configmap/04-delete-data-minio-backup-source-0 created logger.go:42: 14:23:06 | demand-backup/11-delete-data | + for i in 0 1 2 logger.go:42: 14:23:06 | demand-backup/11-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:23:06 | demand-backup/11-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:23:06 | demand-backup/11-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:23:06 | demand-backup/11-delete-data | ++ local pod= logger.go:42: 14:23:06 | demand-backup/11-delete-data | +++ get_client_pod logger.go:42: 14:23:06 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:23:07 | demand-backup/11-delete-data | ++ client_pod=mysql-client logger.go:42: 14:23:07 | demand-backup/11-delete-data | ++ wait_pod mysql-client logger.go:42: 14:23:07 | demand-backup/11-delete-data | ++ local pod=mysql-client logger.go:42: 14:23:07 | demand-backup/11-delete-data | ++ set +o xtrace logger.go:42: 14:23:07 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 14:23:07 | demand-backup/11-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 14:23:07 | demand-backup/11-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 14:23:07 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:23:09 | demand-backup/11-delete-data | ++ : logger.go:42: 14:23:09 | demand-backup/11-delete-data | + data= logger.go:42: 14:23:09 | demand-backup/11-delete-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 04-delete-data-minio-backup-source-1 --from-literal=data= logger.go:42: 14:23:10 | demand-backup/11-delete-data | configmap/04-delete-data-minio-backup-source-1 created logger.go:42: 14:23:10 | demand-backup/11-delete-data | + for i in 0 1 2 logger.go:42: 14:23:10 | demand-backup/11-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:23:10 | demand-backup/11-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:23:10 | demand-backup/11-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:23:10 | demand-backup/11-delete-data | ++ local pod= logger.go:42: 14:23:10 | demand-backup/11-delete-data | +++ get_client_pod logger.go:42: 14:23:10 | demand-backup/11-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:23:10 | demand-backup/11-delete-data | ++ client_pod=mysql-client logger.go:42: 14:23:10 | demand-backup/11-delete-data | ++ wait_pod mysql-client logger.go:42: 14:23:10 | demand-backup/11-delete-data | ++ local pod=mysql-client logger.go:42: 14:23:10 | demand-backup/11-delete-data | ++ set +o xtrace logger.go:42: 14:23:11 | demand-backup/11-delete-data | mysql-clienttrue logger.go:42: 14:23:11 | demand-backup/11-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 14:23:11 | demand-backup/11-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 14:23:11 | demand-backup/11-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:23:13 | demand-backup/11-delete-data | ++ : logger.go:42: 14:23:13 | demand-backup/11-delete-data | + data= logger.go:42: 14:23:13 | demand-backup/11-delete-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 04-delete-data-minio-backup-source-2 --from-literal=data= logger.go:42: 14:23:13 | demand-backup/11-delete-data | configmap/04-delete-data-minio-backup-source-2 created logger.go:42: 14:23:15 | demand-backup/11-delete-data | test step completed 11-delete-data logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | starting test step 12-restore-from-minio-backup-source logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | running command: [sh -c set -o errexit set -o xtrace source ../../functions storage_name="minio" backup_name="demand-backup-minio" restore_name="demand-backup-restore-minio-backup-source" cluster_name="${test_name}${name_suffix:+-$name_suffix}" destination=$(kubectl -n "${NAMESPACE}" get ps-backup "${backup_name}" -o jsonpath='{.status.destination}') cat "${DEPLOY_DIR}/restore.yaml" \ | yq eval "$(printf '.metadata.name="%s"' "${restore_name}")" - \ | yq eval "$(printf '.spec.clusterName="%s"' "${cluster_name}")" - \ | yq eval "del(.spec.backupName)" - \ | yq eval "$(printf '.spec.backupSource.destination="%s"' "${destination}")" - \ | yq eval '.spec.backupSource.storage.type="s3"' - \ | yq eval '.spec.backupSource.storage.s3.bucket="operator-testing"' - \ | yq eval '.spec.backupSource.storage.s3.credentialsSecret="minio-secret"' - \ | yq eval '.spec.backupSource.storage.s3.endpointUrl="http://minio-service:9000"' - \ | yq eval '.spec.backupSource.storage.s3.region="us-east-1"' - \ | kubectl apply -n "${NAMESPACE}" -f -] logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | + source ../../functions logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ realpath ../../.. 
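Step 11 ends with three configmaps whose data key is deliberately empty: each per-replica SELECT returned no rows after the TRUNCATE, so `--from-literal=data=` records an empty string, presumably for a later assert step to compare against. A quick way to confirm the markers really are empty, using the configmap names from the step above:

for i in 0 1 2; do
    # jsonpath prints the stored value; an empty line per configmap
    # confirms the table was truncated on that replica.
    kubectl -n "${NAMESPACE}" get configmap \
        "04-delete-data-minio-backup-source-${i}" -o 'jsonpath={.data.data}'
    echo
done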
logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | ++++ mktemp -d logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export TEMP_DIR=/tmp/tmp.MeqbL0lTJB logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ TEMP_DIR=/tmp/tmp.MeqbL0lTJB logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export GIT_BRANCH=PR-424 logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ GIT_BRANCH=PR-424 logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export VERSION=PR-424-70568ae logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ VERSION=PR-424-70568ae logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ 
IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ export IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | ++++ which gdate logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | ++++ which date logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ date=/usr/bin/date logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ command -v oc logger.go:42: 14:23:15 | demand-backup/12-restore-from-minio-backup-source | +++ oc get projects logger.go:42: 14:23:20 | demand-backup/12-restore-from-minio-backup-source | error: the server doesn't have a resource type "projects" logger.go:42: 14:23:20 | demand-backup/12-restore-from-minio-backup-source | +++ kubectl get nodes logger.go:42: 14:23:20 | demand-backup/12-restore-from-minio-backup-source | +++ grep '^minikube' logger.go:42: 14:23:21 | demand-backup/12-restore-from-minio-backup-source | ++++ pwd logger.go:42: 14:23:21 | demand-backup/12-restore-from-minio-backup-source | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup logger.go:42: 14:23:21 | demand-backup/12-restore-from-minio-backup-source | ++ test_name=demand-backup logger.go:42: 14:23:21 | demand-backup/12-restore-from-minio-backup-source | + storage_name=minio logger.go:42: 14:23:21 | demand-backup/12-restore-from-minio-backup-source | + 
backup_name=demand-backup-minio logger.go:42: 14:23:21 | demand-backup/12-restore-from-minio-backup-source | + restore_name=demand-backup-restore-minio-backup-source logger.go:42: 14:23:21 | demand-backup/12-restore-from-minio-backup-source | + cluster_name=demand-backup logger.go:42: 14:23:21 | demand-backup/12-restore-from-minio-backup-source | ++ kubectl -n kuttl-test-hardy-killdeer get ps-backup demand-backup-minio -o 'jsonpath={.status.destination}' logger.go:42: 14:23:22 | demand-backup/12-restore-from-minio-backup-source | + destination=s3://operator-testing/demand-backup-2023-08-17-14:15:22-full logger.go:42: 14:23:22 | demand-backup/12-restore-from-minio-backup-source | + cat /mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy/restore.yaml logger.go:42: 14:23:22 | demand-backup/12-restore-from-minio-backup-source | + yq eval 'del(.spec.backupName)' - logger.go:42: 14:23:22 | demand-backup/12-restore-from-minio-backup-source | ++ printf '.metadata.name="%s"' demand-backup-restore-minio-backup-source logger.go:42: 14:23:22 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.metadata.name="demand-backup-restore-minio-backup-source"' - logger.go:42: 14:23:22 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.type="s3"' - logger.go:42: 14:23:22 | demand-backup/12-restore-from-minio-backup-source | ++ printf '.spec.clusterName="%s"' demand-backup logger.go:42: 14:23:22 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.clusterName="demand-backup"' - logger.go:42: 14:23:22 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.credentialsSecret="minio-secret"' - logger.go:42: 14:23:22 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.bucket="operator-testing"' - logger.go:42: 14:23:22 | demand-backup/12-restore-from-minio-backup-source | + kubectl apply -n kuttl-test-hardy-killdeer -f - logger.go:42: 14:23:22 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.endpointUrl="http://minio-service:9000"' - logger.go:42: 14:23:22 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.storage.s3.region="us-east-1"' - logger.go:42: 14:23:22 | demand-backup/12-restore-from-minio-backup-source | ++ printf '.spec.backupSource.destination="%s"' s3://operator-testing/demand-backup-2023-08-17-14:15:22-full logger.go:42: 14:23:22 | demand-backup/12-restore-from-minio-backup-source | + yq eval '.spec.backupSource.destination="s3://operator-testing/demand-backup-2023-08-17-14:15:22-full"' - logger.go:42: 14:23:23 | demand-backup/12-restore-from-minio-backup-source | perconaservermysqlrestore.ps.percona.com/demand-backup-restore-minio-backup-source created logger.go:42: 14:28:18 | demand-backup/12-restore-from-minio-backup-source | test step completed 12-restore-from-minio-backup-source logger.go:42: 14:28:18 | demand-backup/13-read-data | starting test step 13-read-data logger.go:42: 14:28:18 | demand-backup/13-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2 do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 09-read-data-minio-backup-source-${i} --from-literal=data="${data}" done] logger.go:42: 14:28:18 | demand-backup/13-read-data | + source ../../functions 
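The yq pipeline in step 12 is simply assembling a PerconaServerMySQLRestore that carries an inline backupSource instead of a backupName reference. For readers who want the end product rather than the pipeline, the following applies an equivalent manifest; every spec field is copied from the yq edits in the trace, while the apiVersion is an assumption (the log shows only the API group, ps.percona.com):

kubectl apply -n "${NAMESPACE}" -f - <<'EOF'
apiVersion: ps.percona.com/v1alpha1   # assumed; the trace shows only the group
kind: PerconaServerMySQLRestore
metadata:
  name: demand-backup-restore-minio-backup-source
spec:
  clusterName: demand-backup
  backupSource:
    destination: "s3://operator-testing/demand-backup-2023-08-17-14:15:22-full"
    storage:
      type: s3
      s3:
        bucket: operator-testing
        credentialsSecret: minio-secret
        endpointUrl: http://minio-service:9000
        region: us-east-1
EOF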
logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ realpath ../../.. logger.go:42: 14:28:18 | demand-backup/13-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:28:18 | demand-backup/13-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:28:18 | demand-backup/13-read-data | ++++ mktemp -d logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export TEMP_DIR=/tmp/tmp.h1pbnxzIao logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ TEMP_DIR=/tmp/tmp.h1pbnxzIao logger.go:42: 14:28:18 | demand-backup/13-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export GIT_BRANCH=PR-424 logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ GIT_BRANCH=PR-424 logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export VERSION=PR-424-70568ae logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ VERSION=PR-424-70568ae logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ export IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:28:18 | demand-backup/13-read-data | ++++ which gdate logger.go:42: 14:28:18 | demand-backup/13-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 14:28:18 | demand-backup/13-read-data | ++++ which date logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ date=/usr/bin/date logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ command -v oc logger.go:42: 14:28:18 | demand-backup/13-read-data | +++ oc get projects logger.go:42: 14:28:23 | demand-backup/13-read-data | error: the server doesn't have a resource type "projects" logger.go:42: 14:28:23 | demand-backup/13-read-data | +++ kubectl get nodes logger.go:42: 14:28:23 | demand-backup/13-read-data | +++ grep '^minikube' logger.go:42: 14:28:24 | demand-backup/13-read-data | ++++ pwd logger.go:42: 14:28:24 | demand-backup/13-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup logger.go:42: 14:28:24 | demand-backup/13-read-data | ++ test_name=demand-backup logger.go:42: 14:28:24 | demand-backup/13-read-data | ++ get_cluster_name logger.go:42: 14:28:24 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 14:28:25 | demand-backup/13-read-data | + cluster_name=demand-backup logger.go:42: 14:28:25 | demand-backup/13-read-data | + for i in 0 1 2 logger.go:42: 14:28:25 | demand-backup/13-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:28:25 | demand-backup/13-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:28:25 | demand-backup/13-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:28:25 | demand-backup/13-read-data | ++ local pod= logger.go:42: 14:28:25 | demand-backup/13-read-data | +++ get_client_pod logger.go:42: 14:28:25 | demand-backup/13-read-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:28:26 | demand-backup/13-read-data | ++ client_pod=mysql-client logger.go:42: 14:28:26 | demand-backup/13-read-data | ++ wait_pod mysql-client logger.go:42: 14:28:26 | demand-backup/13-read-data | ++ local 
pod=mysql-client logger.go:42: 14:28:26 | demand-backup/13-read-data | ++ set +o xtrace logger.go:42: 14:28:27 | demand-backup/13-read-data | mysql-clienttrue logger.go:42: 14:28:27 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:28:27 | demand-backup/13-read-data | ++ sed -e 's/mysql: //' logger.go:42: 14:28:27 | demand-backup/13-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:28:28 | demand-backup/13-read-data | + data=100500 logger.go:42: 14:28:28 | demand-backup/13-read-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 09-read-data-minio-backup-source-0 --from-literal=data=100500 logger.go:42: 14:28:29 | demand-backup/13-read-data | configmap/09-read-data-minio-backup-source-0 created logger.go:42: 14:28:29 | demand-backup/13-read-data | + for i in 0 1 2 logger.go:42: 14:28:29 | demand-backup/13-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:28:29 | demand-backup/13-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:28:29 | demand-backup/13-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:28:29 | demand-backup/13-read-data | ++ local pod= logger.go:42: 14:28:29 | demand-backup/13-read-data | +++ get_client_pod logger.go:42: 14:28:29 | demand-backup/13-read-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:28:29 | demand-backup/13-read-data | ++ client_pod=mysql-client logger.go:42: 14:28:29 | demand-backup/13-read-data | ++ wait_pod mysql-client logger.go:42: 14:28:29 | demand-backup/13-read-data | ++ local pod=mysql-client logger.go:42: 14:28:29 | demand-backup/13-read-data | ++ set +o xtrace logger.go:42: 14:28:30 | demand-backup/13-read-data | mysql-clienttrue logger.go:42: 14:28:30 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:28:30 | demand-backup/13-read-data | ++ sed -e 's/mysql: //' logger.go:42: 14:28:30 | demand-backup/13-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
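The repeated xtrace blocks above expose the whole shape of the run_mysql helper. A minimal reconstruction, assuming only what the trace shows (get_client_pod and wait_pod are the helpers traced alongside it; the trailing no-op is inferred from the '++ :' lines that appear in later delete-data steps whenever the query returns nothing):

run_mysql() {
    local command="$1"
    local uri="$2"
    local pod="$3"   # optional; the trace shows it defaulting to empty
    # pick the shared client pod when none was passed
    if [ -z "${pod}" ]; then
        pod=$(get_client_pod)
        wait_pod "${pod}"
    fi
    # pipe the statement into mysql inside the client pod and strip the
    # password-on-command-line warning from the output
    kubectl -n "${NAMESPACE}" exec "${pod}" -- \
        bash -c "printf '%s\n' \"${command}\" | mysql -sN ${uri}" \
        | sed -e 's/mysql: //' \
        | grep -v 'Using a password on the command line interface can be insecure.' \
        || :   # assumed guard so an empty result does not trip errexit
}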
logger.go:42: 14:28:32 | demand-backup/13-read-data | + data=100500 logger.go:42: 14:28:32 | demand-backup/13-read-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 09-read-data-minio-backup-source-1 --from-literal=data=100500 logger.go:42: 14:28:32 | demand-backup/13-read-data | configmap/09-read-data-minio-backup-source-1 created logger.go:42: 14:28:32 | demand-backup/13-read-data | + for i in 0 1 2 logger.go:42: 14:28:32 | demand-backup/13-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:28:32 | demand-backup/13-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:28:32 | demand-backup/13-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:28:32 | demand-backup/13-read-data | ++ local pod= logger.go:42: 14:28:32 | demand-backup/13-read-data | +++ get_client_pod logger.go:42: 14:28:32 | demand-backup/13-read-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:28:33 | demand-backup/13-read-data | ++ client_pod=mysql-client logger.go:42: 14:28:33 | demand-backup/13-read-data | ++ wait_pod mysql-client logger.go:42: 14:28:33 | demand-backup/13-read-data | ++ local pod=mysql-client logger.go:42: 14:28:33 | demand-backup/13-read-data | ++ set +o xtrace logger.go:42: 14:28:34 | demand-backup/13-read-data | mysql-clienttrue logger.go:42: 14:28:34 | demand-backup/13-read-data | ++ sed -e 's/mysql: //' logger.go:42: 14:28:34 | demand-backup/13-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:28:34 | demand-backup/13-read-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:28:35 | demand-backup/13-read-data | + data=100500 logger.go:42: 14:28:35 | demand-backup/13-read-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 09-read-data-minio-backup-source-2 --from-literal=data=100500 logger.go:42: 14:28:36 | demand-backup/13-read-data | configmap/09-read-data-minio-backup-source-2 created logger.go:42: 14:28:38 | demand-backup/13-read-data | test step completed 13-read-data logger.go:42: 14:28:38 | demand-backup/14-create-backup-s3 | starting test step 14-create-backup-s3 logger.go:42: 14:28:39 | demand-backup/14-create-backup-s3 | PerconaServerMySQLBackup:kuttl-test-hardy-killdeer/demand-backup-s3 created logger.go:42: 14:28:55 | demand-backup/14-create-backup-s3 | test step completed 14-create-backup-s3 logger.go:42: 14:28:55 | demand-backup/15-delete-data | starting test step 15-delete-data logger.go:42: 14:28:55 | demand-backup/15-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 08-delete-data-s3-${i} --from-literal=data="${data}" done] logger.go:42: 14:28:55 | demand-backup/15-delete-data | + source ../../functions logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ realpath ../../.. 
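A quirk worth flagging: the configmap prefixes lag behind the renumbered step indices, so step 13 writes 09-read-data-minio-backup-source-*, step 15 writes 08-delete-data-s3-*, and steps 17 and 21 both use an 06- prefix. kuttl only ever compares these objects against the step's assert file, so the names merely have to match on both sides. A hedged sketch of what the assert for step 13 presumably contains (the file name and layout are assumptions; the configmap name and the expected value 100500 come straight from the log):

cat <<'EOF' > 13-assert.yaml   # hypothetical file name
apiVersion: v1
kind: ConfigMap
metadata:
  name: 09-read-data-minio-backup-source-0
data:
  data: "100500"
EOF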
logger.go:42: 14:28:55 | demand-backup/15-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:28:55 | demand-backup/15-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:28:55 | demand-backup/15-delete-data | ++++ mktemp -d logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export TEMP_DIR=/tmp/tmp.CUKQadHaCW logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ TEMP_DIR=/tmp/tmp.CUKQadHaCW logger.go:42: 14:28:55 | demand-backup/15-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export GIT_BRANCH=PR-424 logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ GIT_BRANCH=PR-424 logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export VERSION=PR-424-70568ae logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ VERSION=PR-424-70568ae logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ export IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:28:55 | demand-backup/15-delete-data | ++++ which gdate logger.go:42: 14:28:55 | demand-backup/15-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 14:28:55 | demand-backup/15-delete-data | ++++ which date logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ date=/usr/bin/date logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ command -v oc logger.go:42: 14:28:55 | demand-backup/15-delete-data | +++ oc get projects logger.go:42: 14:29:01 | demand-backup/15-delete-data | error: the server doesn't have a resource type "projects" logger.go:42: 14:29:01 | demand-backup/15-delete-data | +++ grep '^minikube' logger.go:42: 14:29:01 | demand-backup/15-delete-data | +++ kubectl get nodes logger.go:42: 14:29:01 | demand-backup/15-delete-data | ++++ pwd logger.go:42: 14:29:01 | demand-backup/15-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup logger.go:42: 14:29:01 | demand-backup/15-delete-data | ++ test_name=demand-backup logger.go:42: 14:29:01 | demand-backup/15-delete-data | +++ get_cluster_name logger.go:42: 14:29:01 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 14:29:02 | demand-backup/15-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 14:29:02 | demand-backup/15-delete-data | ++ local cluster=demand-backup logger.go:42: 14:29:02 | demand-backup/15-delete-data | ++ echo demand-backup-haproxy logger.go:42: 14:29:02 | demand-backup/15-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 14:29:02 | demand-backup/15-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 14:29:02 | demand-backup/15-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 14:29:02 | demand-backup/15-delete-data | + local pod= logger.go:42: 14:29:02 | demand-backup/15-delete-data | ++ get_client_pod logger.go:42: 14:29:02 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:29:03 | demand-backup/15-delete-data | + client_pod=mysql-client logger.go:42: 14:29:03 | 
demand-backup/15-delete-data | + wait_pod mysql-client logger.go:42: 14:29:03 | demand-backup/15-delete-data | + local pod=mysql-client logger.go:42: 14:29:03 | demand-backup/15-delete-data | + set +o xtrace logger.go:42: 14:29:04 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 14:29:04 | demand-backup/15-delete-data | + sed -e 's/mysql: //' logger.go:42: 14:29:04 | demand-backup/15-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:29:04 | demand-backup/15-delete-data | + kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 14:29:05 | demand-backup/15-delete-data | + : logger.go:42: 14:29:05 | demand-backup/15-delete-data | ++ get_cluster_name logger.go:42: 14:29:05 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 14:29:06 | demand-backup/15-delete-data | + cluster_name=demand-backup logger.go:42: 14:29:06 | demand-backup/15-delete-data | + for i in 0 1 2 logger.go:42: 14:29:06 | demand-backup/15-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:29:06 | demand-backup/15-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:29:06 | demand-backup/15-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:29:06 | demand-backup/15-delete-data | ++ local pod= logger.go:42: 14:29:06 | demand-backup/15-delete-data | +++ get_client_pod logger.go:42: 14:29:06 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:29:07 | demand-backup/15-delete-data | ++ client_pod=mysql-client logger.go:42: 14:29:07 | demand-backup/15-delete-data | ++ wait_pod mysql-client logger.go:42: 14:29:07 | demand-backup/15-delete-data | ++ local pod=mysql-client logger.go:42: 14:29:07 | demand-backup/15-delete-data | ++ set +o xtrace logger.go:42: 14:29:07 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 14:29:07 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:29:07 | demand-backup/15-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 14:29:07 | demand-backup/15-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
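The odd mysql-clienttrue tokens are wait_pod output: the helper turns tracing off (set +o xtrace) and prints the pod name immediately followed by its readiness flag, with no separator. The loop itself is hidden by the disabled trace, so this reconstruction is guesswork beyond the visible pieces:

wait_pod() {
    local pod="$1"
    set +o xtrace
    echo -n "${pod}"
    # assumed poll: wait until the first container reports ready
    until [ "$(kubectl -n "${NAMESPACE}" get pod "${pod}" \
        -o 'jsonpath={.status.containerStatuses[0].ready}')" = "true" ]; do
        sleep 1
    done
    echo true   # together with the echo -n above, yields e.g. 'mysql-clienttrue'
    set -o xtrace
}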
logger.go:42: 14:29:09 | demand-backup/15-delete-data | ++ : logger.go:42: 14:29:09 | demand-backup/15-delete-data | + data= logger.go:42: 14:29:09 | demand-backup/15-delete-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 08-delete-data-s3-0 --from-literal=data= logger.go:42: 14:29:10 | demand-backup/15-delete-data | configmap/08-delete-data-s3-0 created logger.go:42: 14:29:10 | demand-backup/15-delete-data | + for i in 0 1 2 logger.go:42: 14:29:10 | demand-backup/15-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:29:10 | demand-backup/15-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:29:10 | demand-backup/15-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:29:10 | demand-backup/15-delete-data | ++ local pod= logger.go:42: 14:29:10 | demand-backup/15-delete-data | +++ get_client_pod logger.go:42: 14:29:10 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:29:10 | demand-backup/15-delete-data | ++ client_pod=mysql-client logger.go:42: 14:29:10 | demand-backup/15-delete-data | ++ wait_pod mysql-client logger.go:42: 14:29:10 | demand-backup/15-delete-data | ++ local pod=mysql-client logger.go:42: 14:29:10 | demand-backup/15-delete-data | ++ set +o xtrace logger.go:42: 14:29:11 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 14:29:11 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:29:11 | demand-backup/15-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 14:29:11 | demand-backup/15-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 14:29:13 | demand-backup/15-delete-data | ++ : logger.go:42: 14:29:13 | demand-backup/15-delete-data | + data= logger.go:42: 14:29:13 | demand-backup/15-delete-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 08-delete-data-s3-1 --from-literal=data= logger.go:42: 14:29:13 | demand-backup/15-delete-data | configmap/08-delete-data-s3-1 created logger.go:42: 14:29:13 | demand-backup/15-delete-data | + for i in 0 1 2 logger.go:42: 14:29:13 | demand-backup/15-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:29:13 | demand-backup/15-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:29:13 | demand-backup/15-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:29:13 | demand-backup/15-delete-data | ++ local pod= logger.go:42: 14:29:13 | demand-backup/15-delete-data | +++ get_client_pod logger.go:42: 14:29:13 | demand-backup/15-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:29:14 | demand-backup/15-delete-data | ++ client_pod=mysql-client logger.go:42: 14:29:14 | demand-backup/15-delete-data | ++ wait_pod mysql-client logger.go:42: 14:29:14 | demand-backup/15-delete-data | ++ local pod=mysql-client logger.go:42: 14:29:14 | demand-backup/15-delete-data | ++ set +o xtrace logger.go:42: 14:29:15 | demand-backup/15-delete-data | mysql-clienttrue logger.go:42: 14:29:15 | demand-backup/15-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:29:15 | demand-backup/15-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 14:29:15 | demand-backup/15-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:29:16 | demand-backup/15-delete-data | ++ : logger.go:42: 14:29:16 | demand-backup/15-delete-data | + data= logger.go:42: 14:29:16 | demand-backup/15-delete-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 08-delete-data-s3-2 --from-literal=data= logger.go:42: 14:29:17 | demand-backup/15-delete-data | configmap/08-delete-data-s3-2 created logger.go:42: 14:29:19 | demand-backup/15-delete-data | test step completed 15-delete-data logger.go:42: 14:29:19 | demand-backup/16-restore-from-s3 | starting test step 16-restore-from-s3 logger.go:42: 14:29:20 | demand-backup/16-restore-from-s3 | PerconaServerMySQLRestore:kuttl-test-hardy-killdeer/demand-backup-restore-s3 created logger.go:42: 14:34:33 | demand-backup/16-restore-from-s3 | test step completed 16-restore-from-s3 logger.go:42: 14:34:33 | demand-backup/17-read-data | starting test step 17-read-data logger.go:42: 14:34:33 | demand-backup/17-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-read-data-s3-${i} --from-literal=data="${data}" done] logger.go:42: 14:34:33 | demand-backup/17-read-data | + source ../../functions logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ realpath ../../.. 
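Unlike step 12, which had to delete spec.backupName and splice in a backupSource, step 16 (and later step 20) restores by reference: kuttl applies a manifest that simply names an existing ps-backup object, so there is no shell trace to show. Given that step 12 ran del(.spec.backupName) against deploy/restore.yaml, the object applied here plausibly reduces to the sketch below (apiVersion assumed, as before):

kubectl apply -n kuttl-test-hardy-killdeer -f - <<'EOF'
apiVersion: ps.percona.com/v1alpha1   # assumed
kind: PerconaServerMySQLRestore
metadata:
  name: demand-backup-restore-s3
spec:
  clusterName: demand-backup
  backupName: demand-backup-s3   # the ps-backup created in step 14
EOF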
logger.go:42: 14:34:33 | demand-backup/17-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:34:33 | demand-backup/17-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:34:33 | demand-backup/17-read-data | ++++ mktemp -d logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export TEMP_DIR=/tmp/tmp.wUR36m5E3T logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ TEMP_DIR=/tmp/tmp.wUR36m5E3T logger.go:42: 14:34:33 | demand-backup/17-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export GIT_BRANCH=PR-424 logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ GIT_BRANCH=PR-424 logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export VERSION=PR-424-70568ae logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ VERSION=PR-424-70568ae logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:34:33 | demand-backup/17-read-data | 
+++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ export IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:34:33 | demand-backup/17-read-data | ++++ which gdate logger.go:42: 14:34:33 | demand-backup/17-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 14:34:33 | demand-backup/17-read-data | ++++ which date logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ date=/usr/bin/date logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ command -v oc logger.go:42: 14:34:33 | demand-backup/17-read-data | +++ oc get projects logger.go:42: 14:34:39 | demand-backup/17-read-data | error: the server doesn't have a resource type "projects" logger.go:42: 14:34:39 | demand-backup/17-read-data | +++ grep '^minikube' logger.go:42: 14:34:39 | demand-backup/17-read-data | +++ kubectl get nodes logger.go:42: 14:34:40 | demand-backup/17-read-data | ++++ pwd logger.go:42: 14:34:40 | demand-backup/17-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup logger.go:42: 14:34:40 | demand-backup/17-read-data | ++ test_name=demand-backup logger.go:42: 14:34:40 | demand-backup/17-read-data | ++ get_cluster_name logger.go:42: 14:34:40 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 14:34:40 | demand-backup/17-read-data | + cluster_name=demand-backup logger.go:42: 14:34:40 | demand-backup/17-read-data | + for i in 0 1 2 logger.go:42: 14:34:40 | demand-backup/17-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:34:40 | demand-backup/17-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:34:40 | demand-backup/17-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:34:40 | demand-backup/17-read-data | ++ local pod= logger.go:42: 14:34:40 | demand-backup/17-read-data | +++ get_client_pod logger.go:42: 14:34:40 | demand-backup/17-read-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:34:41 | demand-backup/17-read-data | ++ client_pod=mysql-client logger.go:42: 14:34:41 | demand-backup/17-read-data | ++ wait_pod mysql-client logger.go:42: 14:34:41 | demand-backup/17-read-data | ++ local pod=mysql-client logger.go:42: 14:34:41 | demand-backup/17-read-data | ++ set +o xtrace logger.go:42: 14:34:42 | 
demand-backup/17-read-data | mysql-clienttrue logger.go:42: 14:34:42 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:34:42 | demand-backup/17-read-data | ++ sed -e 's/mysql: //' logger.go:42: 14:34:42 | demand-backup/17-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:34:43 | demand-backup/17-read-data | + data=100500 logger.go:42: 14:34:43 | demand-backup/17-read-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 06-read-data-s3-0 --from-literal=data=100500 logger.go:42: 14:34:44 | demand-backup/17-read-data | configmap/06-read-data-s3-0 created logger.go:42: 14:34:44 | demand-backup/17-read-data | + for i in 0 1 2 logger.go:42: 14:34:44 | demand-backup/17-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:34:44 | demand-backup/17-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:34:44 | demand-backup/17-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:34:44 | demand-backup/17-read-data | ++ local pod= logger.go:42: 14:34:44 | demand-backup/17-read-data | +++ get_client_pod logger.go:42: 14:34:44 | demand-backup/17-read-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:34:45 | demand-backup/17-read-data | ++ client_pod=mysql-client logger.go:42: 14:34:45 | demand-backup/17-read-data | ++ wait_pod mysql-client logger.go:42: 14:34:45 | demand-backup/17-read-data | ++ local pod=mysql-client logger.go:42: 14:34:45 | demand-backup/17-read-data | ++ set +o xtrace logger.go:42: 14:34:46 | demand-backup/17-read-data | mysql-clienttrue logger.go:42: 14:34:46 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:34:46 | demand-backup/17-read-data | ++ sed -e 's/mysql: //' logger.go:42: 14:34:46 | demand-backup/17-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 14:34:47 | demand-backup/17-read-data | + data=100500 logger.go:42: 14:34:47 | demand-backup/17-read-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 06-read-data-s3-1 --from-literal=data=100500 logger.go:42: 14:34:48 | demand-backup/17-read-data | configmap/06-read-data-s3-1 created logger.go:42: 14:34:48 | demand-backup/17-read-data | + for i in 0 1 2 logger.go:42: 14:34:48 | demand-backup/17-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:34:48 | demand-backup/17-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:34:48 | demand-backup/17-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:34:48 | demand-backup/17-read-data | ++ local pod= logger.go:42: 14:34:48 | demand-backup/17-read-data | +++ get_client_pod logger.go:42: 14:34:48 | demand-backup/17-read-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:34:49 | demand-backup/17-read-data | ++ client_pod=mysql-client logger.go:42: 14:34:49 | demand-backup/17-read-data | ++ wait_pod mysql-client logger.go:42: 14:34:49 | demand-backup/17-read-data | ++ local pod=mysql-client logger.go:42: 14:34:49 | demand-backup/17-read-data | ++ set +o xtrace logger.go:42: 14:34:49 | demand-backup/17-read-data | mysql-clienttrue logger.go:42: 14:34:49 | demand-backup/17-read-data | ++ sed -e 's/mysql: //' logger.go:42: 14:34:49 | demand-backup/17-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:34:49 | demand-backup/17-read-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:34:51 | demand-backup/17-read-data | + data=100500 logger.go:42: 14:34:51 | demand-backup/17-read-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 06-read-data-s3-2 --from-literal=data=100500 logger.go:42: 14:34:52 | demand-backup/17-read-data | configmap/06-read-data-s3-2 created logger.go:42: 14:34:53 | demand-backup/17-read-data | test step completed 17-read-data logger.go:42: 14:34:53 | demand-backup/18-create-backup-gcp | starting test step 18-create-backup-gcp logger.go:42: 14:34:55 | demand-backup/18-create-backup-gcp | PerconaServerMySQLBackup:kuttl-test-hardy-killdeer/demand-backup-gcp created logger.go:42: 14:35:10 | demand-backup/18-create-backup-gcp | test step completed 18-create-backup-gcp logger.go:42: 14:35:10 | demand-backup/19-delete-data | starting test step 19-delete-data logger.go:42: 14:35:10 | demand-backup/19-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 12-delete-data-gcp-${i} --from-literal=data="${data}" done] logger.go:42: 14:35:10 | demand-backup/19-delete-data | + source ../../functions logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ realpath ../../.. 
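The create-backup steps likewise produce no xtrace, because kuttl applies their manifests directly; the log records only the resulting PerconaServerMySQLBackup object. The manifest behind step 18 is presumably a thin CR along these lines; the spec field names and the storage name are assumptions, since the log never shows the file:

kubectl apply -n kuttl-test-hardy-killdeer -f - <<'EOF'
apiVersion: ps.percona.com/v1alpha1   # assumed
kind: PerconaServerMySQLBackup
metadata:
  name: demand-backup-gcp
spec:
  clusterName: demand-backup
  storageName: gcp-cs   # assumed; must match a storage defined in the cluster CR
EOF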
logger.go:42: 14:35:10 | demand-backup/19-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:35:10 | demand-backup/19-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:35:10 | demand-backup/19-delete-data | ++++ mktemp -d logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export TEMP_DIR=/tmp/tmp.jKlCQzsbAC logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ TEMP_DIR=/tmp/tmp.jKlCQzsbAC logger.go:42: 14:35:10 | demand-backup/19-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export GIT_BRANCH=PR-424 logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ GIT_BRANCH=PR-424 logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export VERSION=PR-424-70568ae logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ VERSION=PR-424-70568ae logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ export IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:35:10 | demand-backup/19-delete-data | ++++ which gdate logger.go:42: 14:35:10 | demand-backup/19-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 14:35:10 | demand-backup/19-delete-data | ++++ which date logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ date=/usr/bin/date logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ command -v oc logger.go:42: 14:35:10 | demand-backup/19-delete-data | +++ oc get projects logger.go:42: 14:35:16 | demand-backup/19-delete-data | error: the server doesn't have a resource type "projects" logger.go:42: 14:35:16 | demand-backup/19-delete-data | +++ kubectl get nodes logger.go:42: 14:35:16 | demand-backup/19-delete-data | +++ grep '^minikube' logger.go:42: 14:35:17 | demand-backup/19-delete-data | ++++ pwd logger.go:42: 14:35:17 | demand-backup/19-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup logger.go:42: 14:35:17 | demand-backup/19-delete-data | ++ test_name=demand-backup logger.go:42: 14:35:17 | demand-backup/19-delete-data | +++ get_cluster_name logger.go:42: 14:35:17 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 14:35:18 | demand-backup/19-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 14:35:18 | demand-backup/19-delete-data | ++ local cluster=demand-backup logger.go:42: 14:35:18 | demand-backup/19-delete-data | ++ echo demand-backup-haproxy logger.go:42: 14:35:18 | demand-backup/19-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 14:35:18 | demand-backup/19-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 14:35:18 | demand-backup/19-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 14:35:18 | demand-backup/19-delete-data | + local pod= logger.go:42: 14:35:18 | demand-backup/19-delete-data | ++ get_client_pod logger.go:42: 14:35:18 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:35:18 | demand-backup/19-delete-data | + client_pod=mysql-client logger.go:42: 14:35:18 | 
demand-backup/19-delete-data | + wait_pod mysql-client logger.go:42: 14:35:18 | demand-backup/19-delete-data | + local pod=mysql-client logger.go:42: 14:35:18 | demand-backup/19-delete-data | + set +o xtrace logger.go:42: 14:35:19 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 14:35:19 | demand-backup/19-delete-data | + kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 14:35:19 | demand-backup/19-delete-data | + sed -e 's/mysql: //' logger.go:42: 14:35:19 | demand-backup/19-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:35:21 | demand-backup/19-delete-data | + : logger.go:42: 14:35:21 | demand-backup/19-delete-data | ++ get_cluster_name logger.go:42: 14:35:21 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 14:35:21 | demand-backup/19-delete-data | + cluster_name=demand-backup logger.go:42: 14:35:21 | demand-backup/19-delete-data | + for i in 0 1 2 logger.go:42: 14:35:21 | demand-backup/19-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:35:21 | demand-backup/19-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:35:21 | demand-backup/19-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:35:21 | demand-backup/19-delete-data | ++ local pod= logger.go:42: 14:35:21 | demand-backup/19-delete-data | +++ get_client_pod logger.go:42: 14:35:21 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:35:22 | demand-backup/19-delete-data | ++ client_pod=mysql-client logger.go:42: 14:35:22 | demand-backup/19-delete-data | ++ wait_pod mysql-client logger.go:42: 14:35:22 | demand-backup/19-delete-data | ++ local pod=mysql-client logger.go:42: 14:35:22 | demand-backup/19-delete-data | ++ set +o xtrace logger.go:42: 14:35:23 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 14:35:23 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:35:23 | demand-backup/19-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 14:35:23 | demand-backup/19-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
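Two more helpers are fully visible in the trace above: get_cluster_name asks the API server for the first ps object in the namespace, and get_haproxy_svc appends -haproxy to whatever cluster name it is given. A direct reconstruction, with only the string interpolation in the echo inferred:

get_cluster_name() {
    kubectl -n "${NAMESPACE}" get ps -o 'jsonpath={.items[0].metadata.name}'
}

get_haproxy_svc() {
    local cluster="$1"
    echo "${cluster}-haproxy"
}

Writes such as the TRUNCATE above go through the haproxy service so they land on the current primary, while the per-pod SELECTs that follow check that the change (or the restore) has reached every replica.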
logger.go:42: 14:35:24 | demand-backup/19-delete-data | ++ : logger.go:42: 14:35:24 | demand-backup/19-delete-data | + data= logger.go:42: 14:35:24 | demand-backup/19-delete-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 12-delete-data-gcp-0 --from-literal=data= logger.go:42: 14:35:25 | demand-backup/19-delete-data | configmap/12-delete-data-gcp-0 created logger.go:42: 14:35:25 | demand-backup/19-delete-data | + for i in 0 1 2 logger.go:42: 14:35:25 | demand-backup/19-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:35:25 | demand-backup/19-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:35:25 | demand-backup/19-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:35:25 | demand-backup/19-delete-data | ++ local pod= logger.go:42: 14:35:25 | demand-backup/19-delete-data | +++ get_client_pod logger.go:42: 14:35:25 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:35:26 | demand-backup/19-delete-data | ++ client_pod=mysql-client logger.go:42: 14:35:26 | demand-backup/19-delete-data | ++ wait_pod mysql-client logger.go:42: 14:35:26 | demand-backup/19-delete-data | ++ local pod=mysql-client logger.go:42: 14:35:26 | demand-backup/19-delete-data | ++ set +o xtrace logger.go:42: 14:35:27 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 14:35:27 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:35:27 | demand-backup/19-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 14:35:27 | demand-backup/19-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 14:35:28 | demand-backup/19-delete-data | ++ : logger.go:42: 14:35:28 | demand-backup/19-delete-data | + data= logger.go:42: 14:35:28 | demand-backup/19-delete-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 12-delete-data-gcp-1 --from-literal=data= logger.go:42: 14:35:29 | demand-backup/19-delete-data | configmap/12-delete-data-gcp-1 created logger.go:42: 14:35:29 | demand-backup/19-delete-data | + for i in 0 1 2 logger.go:42: 14:35:29 | demand-backup/19-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:35:29 | demand-backup/19-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:35:29 | demand-backup/19-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:35:29 | demand-backup/19-delete-data | ++ local pod= logger.go:42: 14:35:29 | demand-backup/19-delete-data | +++ get_client_pod logger.go:42: 14:35:29 | demand-backup/19-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:35:29 | demand-backup/19-delete-data | ++ client_pod=mysql-client logger.go:42: 14:35:29 | demand-backup/19-delete-data | ++ wait_pod mysql-client logger.go:42: 14:35:29 | demand-backup/19-delete-data | ++ local pod=mysql-client logger.go:42: 14:35:29 | demand-backup/19-delete-data | ++ set +o xtrace logger.go:42: 14:35:30 | demand-backup/19-delete-data | mysql-clienttrue logger.go:42: 14:35:30 | demand-backup/19-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:35:30 | demand-backup/19-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 14:35:30 | demand-backup/19-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:35:32 | demand-backup/19-delete-data | ++ : logger.go:42: 14:35:32 | demand-backup/19-delete-data | + data= logger.go:42: 14:35:32 | demand-backup/19-delete-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 12-delete-data-gcp-2 --from-literal=data= logger.go:42: 14:35:32 | demand-backup/19-delete-data | configmap/12-delete-data-gcp-2 created logger.go:42: 14:35:34 | demand-backup/19-delete-data | test step completed 19-delete-data logger.go:42: 14:35:34 | demand-backup/20-restore-from-gcp | starting test step 20-restore-from-gcp logger.go:42: 14:35:35 | demand-backup/20-restore-from-gcp | PerconaServerMySQLRestore:kuttl-test-hardy-killdeer/demand-backup-restore-gcp created logger.go:42: 14:40:36 | demand-backup/20-restore-from-gcp | test step completed 20-restore-from-gcp logger.go:42: 14:40:36 | demand-backup/21-read-data | starting test step 21-read-data logger.go:42: 14:40:36 | demand-backup/21-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-read-data-gcp-${i} --from-literal=data="${data}" done] logger.go:42: 14:40:36 | demand-backup/21-read-data | + source ../../functions logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ realpath ../../.. 
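By this point the log has run the same verification cycle for minio (restore by name and by backup source), s3, and now gcp. Condensed, each storage backend goes through the loop below; this is a summary of the traced behaviour with hypothetical configmap names, not a script from the repository:

cluster_name=$(get_cluster_name)
for storage in minio s3 gcp; do
    # 1. take a backup (create-backup step: kuttl applies a ps-backup CR)
    # 2. wipe the data (delete-data step)
    run_mysql "TRUNCATE TABLE myDB.myTable" \
        "-h $(get_haproxy_svc "${cluster_name}") -uroot -proot_password"
    # 3. restore (kuttl applies a ps-restore CR and waits several minutes)
    # 4. verify on every replica (read-data step: expect 100500 back)
    for i in 0 1 2; do
        data=$(run_mysql "SELECT * FROM myDB.myTable" \
            "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password")
        kubectl create configmap -n "${NAMESPACE}" "read-data-${storage}-${i}" \
            --from-literal=data="${data}"
    done
done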
logger.go:42: 14:40:36 | demand-backup/21-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:40:36 | demand-backup/21-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:40:36 | demand-backup/21-read-data | ++++ mktemp -d logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export TEMP_DIR=/tmp/tmp.LeIXUbjPoI logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ TEMP_DIR=/tmp/tmp.LeIXUbjPoI logger.go:42: 14:40:36 | demand-backup/21-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export GIT_BRANCH=PR-424 logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ GIT_BRANCH=PR-424 logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export VERSION=PR-424-70568ae logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ VERSION=PR-424-70568ae logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:40:36 | demand-backup/21-read-data | 
+++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ export IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:40:36 | demand-backup/21-read-data | ++++ which gdate logger.go:42: 14:40:36 | demand-backup/21-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 14:40:36 | demand-backup/21-read-data | ++++ which date logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ date=/usr/bin/date logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ command -v oc logger.go:42: 14:40:36 | demand-backup/21-read-data | +++ oc get projects logger.go:42: 14:40:42 | demand-backup/21-read-data | error: the server doesn't have a resource type "projects" logger.go:42: 14:40:42 | demand-backup/21-read-data | +++ kubectl get nodes logger.go:42: 14:40:42 | demand-backup/21-read-data | +++ grep '^minikube' logger.go:42: 14:40:43 | demand-backup/21-read-data | ++++ pwd logger.go:42: 14:40:43 | demand-backup/21-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup logger.go:42: 14:40:43 | demand-backup/21-read-data | ++ test_name=demand-backup logger.go:42: 14:40:43 | demand-backup/21-read-data | ++ get_cluster_name logger.go:42: 14:40:43 | demand-backup/21-read-data | ++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 14:40:43 | demand-backup/21-read-data | + cluster_name=demand-backup logger.go:42: 14:40:43 | demand-backup/21-read-data | + for i in 0 1 2 logger.go:42: 14:40:43 | demand-backup/21-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:40:43 | demand-backup/21-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:40:43 | demand-backup/21-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:40:43 | demand-backup/21-read-data | ++ local pod= logger.go:42: 14:40:43 | demand-backup/21-read-data | +++ get_client_pod logger.go:42: 14:40:43 | demand-backup/21-read-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:40:44 | demand-backup/21-read-data | ++ client_pod=mysql-client logger.go:42: 14:40:44 | demand-backup/21-read-data | ++ wait_pod mysql-client logger.go:42: 14:40:44 | demand-backup/21-read-data | ++ local pod=mysql-client logger.go:42: 14:40:44 | demand-backup/21-read-data | ++ set +o xtrace logger.go:42: 14:40:45 | 
demand-backup/21-read-data | mysql-clienttrue logger.go:42: 14:40:45 | demand-backup/21-read-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:40:45 | demand-backup/21-read-data | ++ sed -e 's/mysql: //' logger.go:42: 14:40:45 | demand-backup/21-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:40:46 | demand-backup/21-read-data | + data=100500 logger.go:42: 14:40:46 | demand-backup/21-read-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 06-read-data-gcp-0 --from-literal=data=100500 logger.go:42: 14:40:47 | demand-backup/21-read-data | configmap/06-read-data-gcp-0 created logger.go:42: 14:40:47 | demand-backup/21-read-data | + for i in 0 1 2 logger.go:42: 14:40:47 | demand-backup/21-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:40:47 | demand-backup/21-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:40:47 | demand-backup/21-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:40:47 | demand-backup/21-read-data | ++ local pod= logger.go:42: 14:40:47 | demand-backup/21-read-data | +++ get_client_pod logger.go:42: 14:40:47 | demand-backup/21-read-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:40:48 | demand-backup/21-read-data | ++ client_pod=mysql-client logger.go:42: 14:40:48 | demand-backup/21-read-data | ++ wait_pod mysql-client logger.go:42: 14:40:48 | demand-backup/21-read-data | ++ local pod=mysql-client logger.go:42: 14:40:48 | demand-backup/21-read-data | ++ set +o xtrace logger.go:42: 14:40:49 | demand-backup/21-read-data | mysql-clienttrue logger.go:42: 14:40:49 | demand-backup/21-read-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:40:49 | demand-backup/21-read-data | ++ sed -e 's/mysql: //' logger.go:42: 14:40:49 | demand-backup/21-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
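NOTE: the run_mysql traces in this log all follow the same shape, so a reconstruction may help when skimming the remaining steps. This is a minimal sketch inferred from the xtrace output only, not the canonical definitions in e2e-tests/functions; the function names come from the log, the bodies are assumptions.

    # Sketch reconstructed from the xtrace above (approximation only).
    get_client_pod() {
        kubectl -n "${NAMESPACE}" get pods \
            --selector=name=mysql-client \
            -o 'jsonpath={.items[].metadata.name}'
    }

    wait_pod() {
        local pod="$1"
        set +o xtrace
        # Body unknown; the "mysql-clienttrue" output above suggests it prints
        # the pod name, then polls until the container reports ready=true:
        echo -n "${pod}"
        until [[ "$(kubectl -n "${NAMESPACE}" get pod "${pod}" \
            -o 'jsonpath={.status.containerStatuses[0].ready}')" == "true" ]]; do
            sleep 1
        done
        echo true
    }

    run_mysql() {
        local command="$1"   # SQL text, e.g. "SELECT * FROM myDB.myTable"
        local uri="$2"       # mysql options, e.g. "-h <host> -uroot -proot_password"
        local pod=
        client_pod=$(get_client_pod)
        wait_pod "${client_pod}"
        # Feed the statement to the mysql CLI inside the shared client pod and
        # strip the client-side password warning so only row data remains:
        kubectl -n "${NAMESPACE}" exec "${client_pod}" -- \
            bash -c "printf '%s\n' \"${command}\" | mysql -sN ${uri}" \
            | sed -e 's/mysql: //' \
            | grep -v 'Using a password on the command line interface can be insecure.'
    }

Every data read in the remaining steps is then just data=$(run_mysql "SELECT * FROM myDB.myTable" "-h <replica-dns> -uroot -proot_password") against one of the three mysql pods, which is why each statement appears at the ++ nesting level in the trace.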
logger.go:42: 14:40:50 | demand-backup/21-read-data | + data=100500 logger.go:42: 14:40:50 | demand-backup/21-read-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 06-read-data-gcp-1 --from-literal=data=100500 logger.go:42: 14:40:51 | demand-backup/21-read-data | configmap/06-read-data-gcp-1 created logger.go:42: 14:40:51 | demand-backup/21-read-data | + for i in 0 1 2 logger.go:42: 14:40:51 | demand-backup/21-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:40:51 | demand-backup/21-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:40:51 | demand-backup/21-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:40:51 | demand-backup/21-read-data | ++ local pod= logger.go:42: 14:40:51 | demand-backup/21-read-data | +++ get_client_pod logger.go:42: 14:40:51 | demand-backup/21-read-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:40:51 | demand-backup/21-read-data | ++ client_pod=mysql-client logger.go:42: 14:40:51 | demand-backup/21-read-data | ++ wait_pod mysql-client logger.go:42: 14:40:51 | demand-backup/21-read-data | ++ local pod=mysql-client logger.go:42: 14:40:51 | demand-backup/21-read-data | ++ set +o xtrace logger.go:42: 14:40:52 | demand-backup/21-read-data | mysql-clienttrue logger.go:42: 14:40:52 | demand-backup/21-read-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:40:52 | demand-backup/21-read-data | ++ sed -e 's/mysql: //' logger.go:42: 14:40:52 | demand-backup/21-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:40:54 | demand-backup/21-read-data | + data=100500 logger.go:42: 14:40:54 | demand-backup/21-read-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 06-read-data-gcp-2 --from-literal=data=100500 logger.go:42: 14:40:54 | demand-backup/21-read-data | configmap/06-read-data-gcp-2 created logger.go:42: 14:40:56 | demand-backup/21-read-data | test step completed 21-read-data logger.go:42: 14:40:56 | demand-backup/22-create-backup-azure | starting test step 22-create-backup-azure logger.go:42: 14:40:57 | demand-backup/22-create-backup-azure | PerconaServerMySQLBackup:kuttl-test-hardy-killdeer/demand-backup-azure created logger.go:42: 14:41:13 | demand-backup/22-create-backup-azure | test step completed 22-create-backup-azure logger.go:42: 14:41:13 | demand-backup/23-delete-data | starting test step 23-delete-data logger.go:42: 14:41:13 | demand-backup/23-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_mysql \ "TRUNCATE TABLE myDB.myTable" \ "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password" cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 16-delete-data-azure-${i} --from-literal=data="${data}" done] logger.go:42: 14:41:13 | demand-backup/23-delete-data | + source ../../functions logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ realpath ../../.. 
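NOTE: the "running command" echo above flattens the 23-delete-data step script onto one line; here it is reflowed for readability (content verbatim from the log, only whitespace restored):

    set -o errexit
    set -o xtrace
    source ../../functions

    run_mysql \
        "TRUNCATE TABLE myDB.myTable" \
        "-h $(get_haproxy_svc $(get_cluster_name)) -uroot -proot_password"

    cluster_name=$(get_cluster_name)
    for i in 0 1 2; do
        data=$(run_mysql "SELECT * FROM myDB.myTable" \
            "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password")
        kubectl create configmap -n "${NAMESPACE}" 16-delete-data-azure-${i} \
            --from-literal=data="${data}"
    done

Two things worth noting. The destructive TRUNCATE goes through get_haproxy_svc, which per the trace below simply echoes "<cluster>-haproxy", so the write hits the HAProxy service fronting the writable primary rather than a fixed pod. And the per-replica SELECT results are persisted into configmaps (06-read-data-gcp-N above, 16-delete-data-azure-N here), which turns a SQL result into a Kubernetes object that kuttl can check declaratively, presumably against an assert file shipped with each numbered step: 100500 after a restore, empty after a delete.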
logger.go:42: 14:41:13 | demand-backup/23-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:41:13 | demand-backup/23-delete-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:41:13 | demand-backup/23-delete-data | ++++ mktemp -d logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export TEMP_DIR=/tmp/tmp.19XD5VtfOo logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ TEMP_DIR=/tmp/tmp.19XD5VtfOo logger.go:42: 14:41:13 | demand-backup/23-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export GIT_BRANCH=PR-424 logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ GIT_BRANCH=PR-424 logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export VERSION=PR-424-70568ae logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ VERSION=PR-424-70568ae logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ 
IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ export IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:41:13 | demand-backup/23-delete-data | ++++ which gdate logger.go:42: 14:41:13 | demand-backup/23-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 14:41:13 | demand-backup/23-delete-data | ++++ which date logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ date=/usr/bin/date logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ command -v oc logger.go:42: 14:41:13 | demand-backup/23-delete-data | +++ oc get projects logger.go:42: 14:41:19 | demand-backup/23-delete-data | error: the server doesn't have a resource type "projects" logger.go:42: 14:41:19 | demand-backup/23-delete-data | +++ kubectl get nodes logger.go:42: 14:41:19 | demand-backup/23-delete-data | +++ grep '^minikube' logger.go:42: 14:41:20 | demand-backup/23-delete-data | ++++ pwd logger.go:42: 14:41:20 | demand-backup/23-delete-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup logger.go:42: 14:41:20 | demand-backup/23-delete-data | ++ test_name=demand-backup logger.go:42: 14:41:20 | demand-backup/23-delete-data | +++ get_cluster_name logger.go:42: 14:41:20 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 14:41:20 | demand-backup/23-delete-data | ++ get_haproxy_svc demand-backup logger.go:42: 14:41:20 | demand-backup/23-delete-data | ++ local cluster=demand-backup logger.go:42: 14:41:20 | demand-backup/23-delete-data | ++ echo demand-backup-haproxy logger.go:42: 14:41:20 | demand-backup/23-delete-data | + run_mysql 'TRUNCATE TABLE myDB.myTable' '-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 14:41:20 | demand-backup/23-delete-data | + local 'command=TRUNCATE TABLE myDB.myTable' logger.go:42: 14:41:20 | demand-backup/23-delete-data | + local 'uri=-h demand-backup-haproxy -uroot -proot_password' logger.go:42: 14:41:20 | demand-backup/23-delete-data | + local pod= logger.go:42: 14:41:20 | demand-backup/23-delete-data | ++ get_client_pod logger.go:42: 14:41:20 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:41:21 | demand-backup/23-delete-data | + client_pod=mysql-client logger.go:42: 14:41:21 | 
demand-backup/23-delete-data | + wait_pod mysql-client logger.go:42: 14:41:21 | demand-backup/23-delete-data | + local pod=mysql-client logger.go:42: 14:41:21 | demand-backup/23-delete-data | + set +o xtrace logger.go:42: 14:41:22 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 14:41:22 | demand-backup/23-delete-data | + kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "TRUNCATE TABLE myDB.myTable" | mysql -sN -h demand-backup-haproxy -uroot -proot_password' logger.go:42: 14:41:22 | demand-backup/23-delete-data | + sed -e 's/mysql: //' logger.go:42: 14:41:22 | demand-backup/23-delete-data | + grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:41:23 | demand-backup/23-delete-data | + : logger.go:42: 14:41:23 | demand-backup/23-delete-data | ++ get_cluster_name logger.go:42: 14:41:23 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 14:41:24 | demand-backup/23-delete-data | + cluster_name=demand-backup logger.go:42: 14:41:24 | demand-backup/23-delete-data | + for i in 0 1 2 logger.go:42: 14:41:24 | demand-backup/23-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:41:24 | demand-backup/23-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:41:24 | demand-backup/23-delete-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:41:24 | demand-backup/23-delete-data | ++ local pod= logger.go:42: 14:41:24 | demand-backup/23-delete-data | +++ get_client_pod logger.go:42: 14:41:24 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:41:25 | demand-backup/23-delete-data | ++ client_pod=mysql-client logger.go:42: 14:41:25 | demand-backup/23-delete-data | ++ wait_pod mysql-client logger.go:42: 14:41:25 | demand-backup/23-delete-data | ++ local pod=mysql-client logger.go:42: 14:41:25 | demand-backup/23-delete-data | ++ set +o xtrace logger.go:42: 14:41:26 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 14:41:26 | demand-backup/23-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 14:41:26 | demand-backup/23-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 14:41:26 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:41:27 | demand-backup/23-delete-data | ++ : logger.go:42: 14:41:27 | demand-backup/23-delete-data | + data= logger.go:42: 14:41:27 | demand-backup/23-delete-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 16-delete-data-azure-0 --from-literal=data= logger.go:42: 14:41:28 | demand-backup/23-delete-data | configmap/16-delete-data-azure-0 created logger.go:42: 14:41:28 | demand-backup/23-delete-data | + for i in 0 1 2 logger.go:42: 14:41:28 | demand-backup/23-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:41:28 | demand-backup/23-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:41:28 | demand-backup/23-delete-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:41:28 | demand-backup/23-delete-data | ++ local pod= logger.go:42: 14:41:28 | demand-backup/23-delete-data | +++ get_client_pod logger.go:42: 14:41:28 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:41:29 | demand-backup/23-delete-data | ++ client_pod=mysql-client logger.go:42: 14:41:29 | demand-backup/23-delete-data | ++ wait_pod mysql-client logger.go:42: 14:41:29 | demand-backup/23-delete-data | ++ local pod=mysql-client logger.go:42: 14:41:29 | demand-backup/23-delete-data | ++ set +o xtrace logger.go:42: 14:41:29 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 14:41:29 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:41:29 | demand-backup/23-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 14:41:29 | demand-backup/23-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
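NOTE: after the TRUNCATE, each per-replica SELECT returns no rows, so grep -v exits nonzero (nothing passed through it). Under set -o errexit that would abort the step, so the bare "+ :" and "++ :" entries in the trace are consistent with a no-op ":" fallback somewhere inside the command substitution. A minimal sketch of the pattern; the placement of "|| :" is an assumption:

    # With errexit on, absorb a legitimately empty query result instead of
    # letting grep's exit status 1 kill the test step:
    data=$(run_mysql "SELECT * FROM myDB.myTable" \
        "-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password" || :)
    # data='' is then recorded verbatim, giving kuttl an assertable signal
    # that the table really is empty on this replica:
    kubectl create configmap -n "${NAMESPACE}" 16-delete-data-azure-0 \
        --from-literal=data="${data}"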
logger.go:42: 14:41:31 | demand-backup/23-delete-data | ++ : logger.go:42: 14:41:31 | demand-backup/23-delete-data | + data= logger.go:42: 14:41:31 | demand-backup/23-delete-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 16-delete-data-azure-1 --from-literal=data= logger.go:42: 14:41:32 | demand-backup/23-delete-data | configmap/16-delete-data-azure-1 created logger.go:42: 14:41:32 | demand-backup/23-delete-data | + for i in 0 1 2 logger.go:42: 14:41:32 | demand-backup/23-delete-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:41:32 | demand-backup/23-delete-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:41:32 | demand-backup/23-delete-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:41:32 | demand-backup/23-delete-data | ++ local pod= logger.go:42: 14:41:32 | demand-backup/23-delete-data | +++ get_client_pod logger.go:42: 14:41:32 | demand-backup/23-delete-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:41:32 | demand-backup/23-delete-data | ++ client_pod=mysql-client logger.go:42: 14:41:32 | demand-backup/23-delete-data | ++ wait_pod mysql-client logger.go:42: 14:41:32 | demand-backup/23-delete-data | ++ local pod=mysql-client logger.go:42: 14:41:32 | demand-backup/23-delete-data | ++ set +o xtrace logger.go:42: 14:41:33 | demand-backup/23-delete-data | mysql-clienttrue logger.go:42: 14:41:33 | demand-backup/23-delete-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:41:33 | demand-backup/23-delete-data | ++ sed -e 's/mysql: //' logger.go:42: 14:41:33 | demand-backup/23-delete-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:41:35 | demand-backup/23-delete-data | ++ : logger.go:42: 14:41:35 | demand-backup/23-delete-data | + data= logger.go:42: 14:41:35 | demand-backup/23-delete-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 16-delete-data-azure-2 --from-literal=data= logger.go:42: 14:41:35 | demand-backup/23-delete-data | configmap/16-delete-data-azure-2 created logger.go:42: 14:41:37 | demand-backup/23-delete-data | test step completed 23-delete-data logger.go:42: 14:41:37 | demand-backup/24-restore-from-azure | starting test step 24-restore-from-azure logger.go:42: 14:41:38 | demand-backup/24-restore-from-azure | PerconaServerMySQLRestore:kuttl-test-hardy-killdeer/demand-backup-restore-azure created logger.go:42: 14:46:53 | demand-backup/24-restore-from-azure | test step completed 24-restore-from-azure logger.go:42: 14:46:53 | demand-backup/25-read-data | starting test step 25-read-data logger.go:42: 14:46:53 | demand-backup/25-read-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions cluster_name=$(get_cluster_name) for i in 0 1 2; do data=$(run_mysql "SELECT * FROM myDB.myTable" "-h ${cluster_name}-mysql-${i}.${cluster_name}-mysql -uroot -proot_password") kubectl create configmap -n "${NAMESPACE}" 06-read-data-azure-${i} --from-literal=data="${data}" done] logger.go:42: 14:46:53 | demand-backup/25-read-data | + source ../../functions logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ realpath ../../.. 
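NOTE: by the timestamps above, the PerconaServerMySQLRestore object was created at 14:41:38 and the step completed at 14:46:53, so the azure restore itself took a little over five minutes; during that window kuttl is presumably blocking on an assert against the restore object's status. A sketch of how the same wait could be done by hand; the ps-restore short name (mirroring the ps-backup short name used later in this log), the .status.state field, and the Succeeded value are all assumptions to verify against the CRD:

    # Watch the restore object until it reaches a terminal state:
    kubectl -n kuttl-test-hardy-killdeer get ps-restore demand-backup-restore-azure -w
    # Or block on an assumed status field (kubectl wait supports jsonpath
    # conditions since v1.23):
    kubectl -n kuttl-test-hardy-killdeer wait ps-restore/demand-backup-restore-azure \
        --for=jsonpath='{.status.state}'=Succeeded --timeout=10m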
logger.go:42: 14:46:53 | demand-backup/25-read-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:46:53 | demand-backup/25-read-data | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:46:53 | demand-backup/25-read-data | ++++ mktemp -d logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export TEMP_DIR=/tmp/tmp.7VE7pztON2 logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ TEMP_DIR=/tmp/tmp.7VE7pztON2 logger.go:42: 14:46:53 | demand-backup/25-read-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export GIT_BRANCH=PR-424 logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ GIT_BRANCH=PR-424 logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export VERSION=PR-424-70568ae logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ VERSION=PR-424-70568ae logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:46:53 | demand-backup/25-read-data | 
+++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ export IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:46:53 | demand-backup/25-read-data | ++++ which gdate logger.go:42: 14:46:53 | demand-backup/25-read-data | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 14:46:53 | demand-backup/25-read-data | ++++ which date logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ date=/usr/bin/date logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ command -v oc logger.go:42: 14:46:53 | demand-backup/25-read-data | +++ oc get projects logger.go:42: 14:46:58 | demand-backup/25-read-data | error: the server doesn't have a resource type "projects" logger.go:42: 14:46:58 | demand-backup/25-read-data | +++ grep '^minikube' logger.go:42: 14:46:58 | demand-backup/25-read-data | +++ kubectl get nodes logger.go:42: 14:46:59 | demand-backup/25-read-data | ++++ pwd logger.go:42: 14:46:59 | demand-backup/25-read-data | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup logger.go:42: 14:46:59 | demand-backup/25-read-data | ++ test_name=demand-backup logger.go:42: 14:46:59 | demand-backup/25-read-data | ++ get_cluster_name logger.go:42: 14:46:59 | demand-backup/25-read-data | ++ kubectl -n kuttl-test-hardy-killdeer get ps -o 'jsonpath={.items[0].metadata.name}' logger.go:42: 14:47:00 | demand-backup/25-read-data | + cluster_name=demand-backup logger.go:42: 14:47:00 | demand-backup/25-read-data | + for i in 0 1 2 logger.go:42: 14:47:00 | demand-backup/25-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:47:00 | demand-backup/25-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:47:00 | demand-backup/25-read-data | ++ local 'uri=-h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:47:00 | demand-backup/25-read-data | ++ local pod= logger.go:42: 14:47:00 | demand-backup/25-read-data | +++ get_client_pod logger.go:42: 14:47:00 | demand-backup/25-read-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:47:01 | demand-backup/25-read-data | ++ client_pod=mysql-client logger.go:42: 14:47:01 | demand-backup/25-read-data | ++ wait_pod mysql-client logger.go:42: 14:47:01 | demand-backup/25-read-data | ++ local pod=mysql-client logger.go:42: 14:47:01 | demand-backup/25-read-data | ++ set +o xtrace logger.go:42: 14:47:01 | 
demand-backup/25-read-data | mysql-clienttrue logger.go:42: 14:47:01 | demand-backup/25-read-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-0.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:47:01 | demand-backup/25-read-data | ++ sed -e 's/mysql: //' logger.go:42: 14:47:01 | demand-backup/25-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' logger.go:42: 14:47:03 | demand-backup/25-read-data | + data=100500 logger.go:42: 14:47:03 | demand-backup/25-read-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 06-read-data-azure-0 --from-literal=data=100500 logger.go:42: 14:47:03 | demand-backup/25-read-data | configmap/06-read-data-azure-0 created logger.go:42: 14:47:03 | demand-backup/25-read-data | + for i in 0 1 2 logger.go:42: 14:47:03 | demand-backup/25-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:47:03 | demand-backup/25-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:47:03 | demand-backup/25-read-data | ++ local 'uri=-h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:47:03 | demand-backup/25-read-data | ++ local pod= logger.go:42: 14:47:03 | demand-backup/25-read-data | +++ get_client_pod logger.go:42: 14:47:03 | demand-backup/25-read-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:47:04 | demand-backup/25-read-data | ++ client_pod=mysql-client logger.go:42: 14:47:04 | demand-backup/25-read-data | ++ wait_pod mysql-client logger.go:42: 14:47:04 | demand-backup/25-read-data | ++ local pod=mysql-client logger.go:42: 14:47:04 | demand-backup/25-read-data | ++ set +o xtrace logger.go:42: 14:47:05 | demand-backup/25-read-data | mysql-clienttrue logger.go:42: 14:47:05 | demand-backup/25-read-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-1.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:47:05 | demand-backup/25-read-data | ++ sed -e 's/mysql: //' logger.go:42: 14:47:05 | demand-backup/25-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 14:47:06 | demand-backup/25-read-data | + data=100500 logger.go:42: 14:47:06 | demand-backup/25-read-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 06-read-data-azure-1 --from-literal=data=100500 logger.go:42: 14:47:07 | demand-backup/25-read-data | configmap/06-read-data-azure-1 created logger.go:42: 14:47:07 | demand-backup/25-read-data | + for i in 0 1 2 logger.go:42: 14:47:07 | demand-backup/25-read-data | ++ run_mysql 'SELECT * FROM myDB.myTable' '-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:47:07 | demand-backup/25-read-data | ++ local 'command=SELECT * FROM myDB.myTable' logger.go:42: 14:47:07 | demand-backup/25-read-data | ++ local 'uri=-h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:47:07 | demand-backup/25-read-data | ++ local pod= logger.go:42: 14:47:07 | demand-backup/25-read-data | +++ get_client_pod logger.go:42: 14:47:07 | demand-backup/25-read-data | +++ kubectl -n kuttl-test-hardy-killdeer get pods --selector=name=mysql-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 14:47:08 | demand-backup/25-read-data | ++ client_pod=mysql-client logger.go:42: 14:47:08 | demand-backup/25-read-data | ++ wait_pod mysql-client logger.go:42: 14:47:08 | demand-backup/25-read-data | ++ local pod=mysql-client logger.go:42: 14:47:08 | demand-backup/25-read-data | ++ set +o xtrace logger.go:42: 14:47:09 | demand-backup/25-read-data | mysql-clienttrue logger.go:42: 14:47:09 | demand-backup/25-read-data | ++ kubectl -n kuttl-test-hardy-killdeer exec mysql-client -- bash -c 'printf '\''%s\n'\'' "SELECT * FROM myDB.myTable" | mysql -sN -h demand-backup-mysql-2.demand-backup-mysql -uroot -proot_password' logger.go:42: 14:47:09 | demand-backup/25-read-data | ++ sed -e 's/mysql: //' logger.go:42: 14:47:09 | demand-backup/25-read-data | ++ grep -v 'Using a password on the command line interface can be insecure.' 
logger.go:42: 14:47:10 | demand-backup/25-read-data | + data=100500 logger.go:42: 14:47:10 | demand-backup/25-read-data | + kubectl create configmap -n kuttl-test-hardy-killdeer 06-read-data-azure-2 --from-literal=data=100500 logger.go:42: 14:47:11 | demand-backup/25-read-data | configmap/06-read-data-azure-2 created logger.go:42: 14:47:13 | demand-backup/25-read-data | test step completed 25-read-data logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | starting test step 26-delete-all-backups logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | running command: [sh -c set -o errexit set -o xtrace source ../../functions kubectl delete ps-backup --all -n "${NAMESPACE}" backup_name_minio="demand-backup-minio" accessKey="$(kubectl -n "${NAMESPACE}" get secret minio-secret -o jsonpath='{.data.AWS_ACCESS_KEY_ID}' | base64 -d)" secretKey="$(kubectl -n "${NAMESPACE}" get secret minio-secret -o jsonpath='{.data.AWS_SECRET_ACCESS_KEY}' | base64 -d)" backup_exists=$( kubectl run -n "${NAMESPACE}" -i --rm aws-cli --image=perconalab/awscli --restart=Never -- \ /usr/bin/env AWS_ACCESS_KEY_ID="${accessKey}" AWS_SECRET_ACCESS_KEY="${secretKey}" AWS_DEFAULT_REGION=us-east-1 \ /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls 'operator-testing/' | grep -c "${backup_name_minio}/" | cat exit "${PIPESTATUS[0]}" ) if [[ 1 -eq $backup_exists ]]; then echo "Backup was not removed from bucket -- minio" exit 1 fi] logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | + source ../../functions logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ realpath ../../.. logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | ++ source /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/vars.sh logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-ps-operator_PR-424 logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/deploy logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/conf logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | ++++ mktemp -d logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export TEMP_DIR=/tmp/tmp.Ur0bDs4Fep logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ TEMP_DIR=/tmp/tmp.Ur0bDs4Fep logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export GIT_BRANCH=PR-424 logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ GIT_BRANCH=PR-424 
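NOTE: the 26-delete-all-backups script echoed above is the densest shell in this test; reflowed for readability (content verbatim from the log, only whitespace restored):

    set -o errexit
    set -o xtrace
    source ../../functions

    kubectl delete ps-backup --all -n "${NAMESPACE}"

    backup_name_minio="demand-backup-minio"

    accessKey="$(kubectl -n "${NAMESPACE}" get secret minio-secret -o jsonpath='{.data.AWS_ACCESS_KEY_ID}' | base64 -d)"
    secretKey="$(kubectl -n "${NAMESPACE}" get secret minio-secret -o jsonpath='{.data.AWS_SECRET_ACCESS_KEY}' | base64 -d)"

    backup_exists=$(
        kubectl run -n "${NAMESPACE}" -i --rm aws-cli --image=perconalab/awscli --restart=Never -- \
            /usr/bin/env AWS_ACCESS_KEY_ID="${accessKey}" AWS_SECRET_ACCESS_KEY="${secretKey}" AWS_DEFAULT_REGION=us-east-1 \
            /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls 'operator-testing/' \
            | grep -c "${backup_name_minio}/" | cat
        exit "${PIPESTATUS[0]}"
    )
    if [[ 1 -eq $backup_exists ]]; then
        echo "Backup was not removed from bucket -- minio"
        exit 1
    fi

The pipeline plumbing is deliberate: grep -c prints the match count but exits 1 when the count is 0, which is exactly the success case here (no leftover backup in the bucket). The trailing "| cat" makes the pipeline's overall status that of cat, so the zero-match grep cannot fail the assignment, while exit "${PIPESTATUS[0]}" still surfaces a genuine kubectl run failure that cat would otherwise mask. That is why the execution trace later in the log shows "++ exit 0" followed by backup_exists=0. The awkward-looking credentials in that trace (some-access$\n"-key and so on) appear to be intentionally hostile test values, exercising the quoting through /usr/bin/env.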
logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export VERSION=PR-424-70568ae logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ VERSION=PR-424-70568ae logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ IMAGE=perconalab/percona-server-mysql-operator:PR-424-70568ae logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ IMAGE_MYSQL=perconalab/percona-server-mysql-operator:main-psmysql logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ IMAGE_BACKUP=perconalab/percona-server-mysql-operator:main-backup logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ IMAGE_ORCHESTRATOR=perconalab/percona-server-mysql-operator:main-orchestrator logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ IMAGE_ROUTER=perconalab/percona-server-mysql-operator:main-router logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ IMAGE_TOOLKIT=perconalab/percona-server-mysql-operator:main-toolkit logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ IMAGE_HAPROXY=perconalab/percona-server-mysql-operator:main-haproxy logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ IMAGE_PMM=perconalab/pmm-client:dev-latest logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ IMAGE_PMM_SERVER_REPO=perconalab/pmm-server logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ export IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ IMAGE_PMM_SERVER_TAG=dev-latest logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | ++++ which gdate logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | which: no gdate in (/mnt/jenkins/workspace/cloud-ps-operator_PR-424/bin/:/home/ec2-user/google-cloud-sdk/bin:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | ++++ which date logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ date=/usr/bin/date logger.go:42: 14:47:13 | demand-backup/26-delete-all-backups | +++ command -v oc logger.go:42: 14:47:13 | 
demand-backup/26-delete-all-backups | +++ oc get projects logger.go:42: 14:47:18 | demand-backup/26-delete-all-backups | error: the server doesn't have a resource type "projects" logger.go:42: 14:47:18 | demand-backup/26-delete-all-backups | +++ kubectl get nodes logger.go:42: 14:47:18 | demand-backup/26-delete-all-backups | +++ grep '^minikube' logger.go:42: 14:47:19 | demand-backup/26-delete-all-backups | ++++ pwd logger.go:42: 14:47:19 | demand-backup/26-delete-all-backups | +++ basename /mnt/jenkins/workspace/cloud-ps-operator_PR-424/e2e-tests/tests/demand-backup logger.go:42: 14:47:19 | demand-backup/26-delete-all-backups | ++ test_name=demand-backup logger.go:42: 14:47:19 | demand-backup/26-delete-all-backups | + kubectl delete ps-backup --all -n kuttl-test-hardy-killdeer logger.go:42: 14:47:20 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-azure" deleted logger.go:42: 14:47:20 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-gcp" deleted logger.go:42: 14:47:20 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-minio" deleted logger.go:42: 14:47:20 | demand-backup/26-delete-all-backups | perconaservermysqlbackup.ps.percona.com "demand-backup-s3" deleted logger.go:42: 14:47:23 | demand-backup/26-delete-all-backups | + backup_name_minio=demand-backup-minio logger.go:42: 14:47:23 | demand-backup/26-delete-all-backups | ++ kubectl -n kuttl-test-hardy-killdeer get secret minio-secret -o 'jsonpath={.data.AWS_ACCESS_KEY_ID}' logger.go:42: 14:47:23 | demand-backup/26-delete-all-backups | ++ base64 -d logger.go:42: 14:47:24 | demand-backup/26-delete-all-backups | + accessKey='some-access$\n"-key' logger.go:42: 14:47:24 | demand-backup/26-delete-all-backups | ++ kubectl -n kuttl-test-hardy-killdeer get secret minio-secret -o 'jsonpath={.data.AWS_SECRET_ACCESS_KEY}' logger.go:42: 14:47:24 | demand-backup/26-delete-all-backups | ++ base64 -d logger.go:42: 14:47:24 | demand-backup/26-delete-all-backups | + secretKey='some-$\n"secret-key' logger.go:42: 14:47:24 | demand-backup/26-delete-all-backups | ++ kubectl run -n kuttl-test-hardy-killdeer -i --rm aws-cli --image=perconalab/awscli --restart=Never -- /usr/bin/env 'AWS_ACCESS_KEY_ID=some-access$\n"-key' 'AWS_SECRET_ACCESS_KEY=some-$\n"secret-key' AWS_DEFAULT_REGION=us-east-1 /usr/bin/aws --endpoint-url http://minio-service:9000 s3 ls operator-testing/ logger.go:42: 14:47:24 | demand-backup/26-delete-all-backups | ++ grep -c demand-backup-minio/ logger.go:42: 14:47:24 | demand-backup/26-delete-all-backups | ++ cat logger.go:42: 14:47:30 | demand-backup/26-delete-all-backups | ++ exit 0 logger.go:42: 14:47:30 | demand-backup/26-delete-all-backups | + backup_exists=0 logger.go:42: 14:47:30 | demand-backup/26-delete-all-backups | + [[ 1 -eq 0 ]] logger.go:42: 14:47:31 | demand-backup/26-delete-all-backups | test step completed 26-delete-all-backups logger.go:42: 14:47:31 | demand-backup/27-drop-finalizer | starting test step 27-drop-finalizer logger.go:42: 14:47:32 | demand-backup/27-drop-finalizer | PerconaServerMySQL:kuttl-test-hardy-killdeer/demand-backup updated logger.go:42: 14:47:32 | demand-backup/27-drop-finalizer | test step completed 27-drop-finalizer logger.go:42: 14:47:33 | demand-backup | demand-backup events from ns kuttl-test-hardy-killdeer: logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:06:53 +0000 UTC Normal Deployment.apps percona-server-mysql-operator ScalingReplicaSet Scaled up replica 
set percona-server-mysql-operator-6b56d66f99 to 1 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:06:54 +0000 UTC Normal Pod percona-server-mysql-operator-6b56d66f99-fn9ww Scheduled Successfully assigned kuttl-test-hardy-killdeer/percona-server-mysql-operator-6b56d66f99-fn9ww to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:06:54 +0000 UTC Normal ReplicaSet.apps percona-server-mysql-operator-6b56d66f99 SuccessfulCreate Created pod: percona-server-mysql-operator-6b56d66f99-fn9ww logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:06:56 +0000 UTC Normal Pod percona-server-mysql-operator-6b56d66f99-fn9ww.spec.containers{manager} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:06:57 +0000 UTC Normal Pod mysql-client Scheduled Successfully assigned kuttl-test-hardy-killdeer/mysql-client to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:06:58 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Pulling Pulling image "percona/percona-server:8.0.25" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:05 +0000 UTC Normal ReplicaSet.apps minio-service-6f47578c5b SuccessfulCreate Created pod: minio-service-6f47578c5b-snpfm logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:05 +0000 UTC Normal PersistentVolumeClaim minio-service WaitForFirstConsumer waiting for first consumer to be created before binding logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:05 +0000 UTC Normal Deployment.apps minio-service ScalingReplicaSet Scaled up replica set minio-service-6f47578c5b to 1 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:05 +0000 UTC Normal PersistentVolumeClaim minio-service ExternalProvisioning waiting for a volume to be created, either by external provisioner "pd.csi.storage.gke.io" or manually created by system administrator logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:05 +0000 UTC Normal PersistentVolumeClaim minio-service Provisioning External provisioner is provisioning volume for claim "kuttl-test-hardy-killdeer/minio-service" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:09 +0000 UTC Normal PersistentVolumeClaim minio-service ProvisioningSucceeded Successfully provisioned volume pvc-79bc49c6-09b9-49a0-ba66-14cf9dbb70c7 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:09 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Pulled Successfully pulled image "percona/percona-server:8.0.25" in 10.969593187s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:09 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Created Created container mysql-client logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:09 +0000 UTC Normal Pod mysql-client.spec.containers{mysql-client} Started Started container mysql-client logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:10 +0000 UTC Normal Lease.coordination.k8s.io 08db2feb.percona.com LeaderElection percona-server-mysql-operator-6b56d66f99-fn9ww_f7f6f3e4-70e1-4dbc-b093-e26c735cdd24 became leader logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:10 +0000 UTC Normal Pod minio-service-6f47578c5b-snpfm Scheduled Successfully assigned kuttl-test-hardy-killdeer/minio-service-6f47578c5b-snpfm to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:10 +0000 UTC Normal Pod 
percona-server-mysql-operator-6b56d66f99-fn9ww.spec.containers{manager} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 14.035737581s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:10 +0000 UTC Normal Pod percona-server-mysql-operator-6b56d66f99-fn9ww.spec.containers{manager} Created Created container manager logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:10 +0000 UTC Normal Pod percona-server-mysql-operator-6b56d66f99-fn9ww.spec.containers{manager} Started Started container manager logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:16 +0000 UTC Normal Pod minio-service-6f47578c5b-snpfm SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-79bc49c6-09b9-49a0-ba66-14cf9dbb70c7" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:17 +0000 UTC Normal Pod minio-service-6f47578c5b-snpfm.spec.containers{minio} Pulling Pulling image "minio/minio:RELEASE.2020-11-19T23-48-16Z" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:19 +0000 UTC Normal Pod minio-service-6f47578c5b-snpfm.spec.containers{minio} Pulled Successfully pulled image "minio/minio:RELEASE.2020-11-19T23-48-16Z" in 2.246227569s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:19 +0000 UTC Normal Pod minio-service-6f47578c5b-snpfm.spec.containers{minio} Created Created container minio logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:19 +0000 UTC Normal Pod minio-service-6f47578c5b-snpfm.spec.containers{minio} Started Started container minio logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:21 +0000 UTC Normal Pod aws-cli Scheduled Successfully assigned kuttl-test-hardy-killdeer/aws-cli to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:22 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulling Pulling image "perconalab/awscli" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:26 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulled Successfully pulled image "perconalab/awscli" in 3.831183435s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:26 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Created Created container aws-cli logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:26 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Started Started container aws-cli logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:43 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 WaitForFirstConsumer waiting for first consumer to be created before binding logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:43 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 ExternalProvisioning waiting for a volume to be created, either by external provisioner "pd.csi.storage.gke.io" or manually created by system administrator logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:43 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-hardy-killdeer/datadir-demand-backup-mysql-0" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:43 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Claim datadir-demand-backup-mysql-0 Pod demand-backup-mysql-0 in StatefulSet demand-backup-mysql success logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:43 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Pod demand-backup-mysql-0 in 
StatefulSet demand-backup-mysql successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:43 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:43 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulCreate create Pod demand-backup-orc-0 in StatefulSet demand-backup-orc successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:44 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:47 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-0 ProvisioningSucceeded Successfully provisioned volume pvc-134dc26b-63a0-4fcd-b083-43d96f1b5bf7 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:47 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:55 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-134dc26b-63a0-4fcd-b083-43d96f1b5bf7" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:56 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:57 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 13.619859186s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:57 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:07:57 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:04 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:07 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 2.631288691s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:07 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:07 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:07 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:07 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 372.949233ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:07 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | 
demand-backup | 2023-08-17 14:08:07 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 13.988858122s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:16 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:37 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 21.106137141s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:37 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:37 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:37 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:38 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:38 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulCreate create Pod demand-backup-orc-1 in StatefulSet demand-backup-orc successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:40 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:49 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 11.912484162s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:50 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:50 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:50 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:50 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 9.790485204s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:50 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init 
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:50 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:08:55 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 13.451972177s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:06 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 10.938964771s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:06 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:06 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:06 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:06 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 377.100794ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:06 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:06 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:07 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 WaitForFirstConsumer waiting for first consumer to be created before binding logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:07 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 ExternalProvisioning waiting for a volume to be created, either by external provisioner "pd.csi.storage.gke.io" or manually created by system administrator logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:07 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 Provisioning External provisioner is provisioning volume for claim "kuttl-test-hardy-killdeer/datadir-demand-backup-mysql-1" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:07 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Claim datadir-demand-backup-mysql-1 Pod demand-backup-mysql-1 in StatefulSet demand-backup-mysql success logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:07 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Pod demand-backup-mysql-1 in StatefulSet demand-backup-mysql successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 
14:09:11 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-1 ProvisioningSucceeded Successfully provisioned volume pvc-d530ed39-6d4a-4011-82b7-815cc42c282b logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:11 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:14 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:14 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-0 in StatefulSet demand-backup-haproxy successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:16 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:16 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 391.837206ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:16 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:16 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:19 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d530ed39-6d4a-4011-82b7-815cc42c282b" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:20 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:20 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 398.345612ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:20 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:20 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:22 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:22 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:26 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 4.053831453s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:26 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy logger.go:42: 14:47:33 | 
demand-backup | 2023-08-17 14:09:26 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:26 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:27 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 379.565331ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:27 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:27 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:27 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:27 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-1 in StatefulSet demand-backup-haproxy successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:28 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:39 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:39 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulCreate create Pod demand-backup-orc-2 in StatefulSet demand-backup-orc successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:40 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:40 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 428.161269ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:40 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:40 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 21.188210187s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:43 +0000 UTC Normal Pod 
demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:43 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:44 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 15.340912937s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:44 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:44 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:44 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 2.361592873s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:44 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:44 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:44 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:45 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 357.470985ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:45 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:45 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:54 +0000 UTC Warning Pod demand-backup-orc-2.spec.containers{orc} Unhealthy Liveness probe failed: Get "http://10.16.89.11:3000/api/lb-check": dial tcp 10.16.89.11:3000: connect: connection refused logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:56 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:57 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 13.980661938s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:57 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:58 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:09:58 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:01 +0000 UTC Normal 
Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 5.174026471s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:01 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 14.615266677s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:13 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 10.973290003s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:13 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:13 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:13 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:13 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulCreate create Pod demand-backup-haproxy-2 in StatefulSet demand-backup-haproxy successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:14 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 430.459352ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:15 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:17 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:21 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image 
"perconalab/percona-server-mysql-operator:main-haproxy" in 4.02761792s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:21 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:21 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:21 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:22 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 454.895353ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:22 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:22 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:22 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:22 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:26 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 355.149387ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:10:28 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 371.743182ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:00 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 WaitForFirstConsumer waiting for first consumer to be created before binding logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:00 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 ExternalProvisioning waiting for a volume to be created, either by external provisioner "pd.csi.storage.gke.io" or manually created by system administrator logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:00 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 Provisioning External provisioner is provisioning volume for claim "kuttl-test-hardy-killdeer/datadir-demand-backup-mysql-2" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:00 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Claim datadir-demand-backup-mysql-2 Pod demand-backup-mysql-2 in StatefulSet demand-backup-mysql success logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:00 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulCreate create Pod demand-backup-mysql-2 in StatefulSet demand-backup-mysql successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:04 +0000 UTC Normal PersistentVolumeClaim datadir-demand-backup-mysql-2 ProvisioningSucceeded Successfully provisioned volume pvc-f5e13a5c-dd64-4dd9-a75a-7375fa777d5c logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:04 +0000 UTC Normal Pod demand-backup-mysql-2 
Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:11 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-f5e13a5c-dd64-4dd9-a75a-7375fa777d5c" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:13 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:13 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 422.565509ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:13 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:13 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:15 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:36 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 21.210397528s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:36 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:36 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:37 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:49 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 12.137994784s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:49 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:49 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:11:49 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:12:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 11.012718747s logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:12:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:12:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:12:03 
+0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:12:03 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:12:12 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 356.417546ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:13 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:13 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:13 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:15 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2023/08/17 14:13:15 readiness check failed: connect to db: ping database: dial tcp 10.16.90.6:33062: connect: connection refused logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:20 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2023/08/17 14:13:20 readiness check failed: connect to db: ping database: dial tcp 10.16.90.6:33062: connect: connection refused logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:25 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:33 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 435.240608ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:43 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 377.707779ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql logger.go:42: 14:47:33 | demand-backup | 
2023-08-17 14:13:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:45 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 374.739721ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:45 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:45 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:45 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:45 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 364.075815ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:45 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:13:45 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:14:03 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Startup probe failed: logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:14:03 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:14:06 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 361.267609ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:15:23 +0000 UTC Normal Pod xb-demand-backup-minio-minio-kdw58 Scheduled Successfully assigned kuttl-test-hardy-killdeer/xb-demand-backup-minio-minio-kdw58 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:15:23 +0000 UTC Normal Job.batch xb-demand-backup-minio-minio SuccessfulCreate Created pod: xb-demand-backup-minio-minio-kdw58 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:15:24 +0000 UTC Normal Pod xb-demand-backup-minio-minio-kdw58.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:15:24 +0000 UTC Normal Pod xb-demand-backup-minio-minio-kdw58.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 434.753225ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:15:24 +0000 UTC Normal Pod xb-demand-backup-minio-minio-kdw58.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:15:24 +0000 UTC Normal Pod 
xb-demand-backup-minio-minio-kdw58.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:15:26 +0000 UTC Normal Pod xb-demand-backup-minio-minio-kdw58.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:15:26 +0000 UTC Normal Pod xb-demand-backup-minio-minio-kdw58.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 420.611736ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:15:26 +0000 UTC Normal Pod xb-demand-backup-minio-minio-kdw58.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:15:27 +0000 UTC Normal Pod xb-demand-backup-minio-minio-kdw58.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:15:33 +0000 UTC Normal Job.batch xb-demand-backup-minio-minio Completed Job completed logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:51 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:51 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:51 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulDelete delete Pod demand-backup-haproxy-2 in StatefulSet demand-backup-haproxy successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:51 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:51 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:51 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:51 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulDelete delete Pod demand-backup-mysql-2 in StatefulSet demand-backup-mysql successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:51 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:51 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:51 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulDelete delete Pod demand-backup-orc-2 in StatefulSet demand-backup-orc successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:52 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:52 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:52 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulDelete delete Pod demand-backup-haproxy-1 in StatefulSet demand-backup-haproxy successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:52 +0000 UTC Warning Pod 
demand-backup-mysql-2.spec.containers{mysql} Unhealthy Readiness probe failed: 2023/08/17 14:16:52 readiness check failed: connect to db: ping database: dial tcp 10.16.89.13:33062: connect: connection refused logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:54 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:54 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:16:54 +0000 UTC Normal StatefulSet.apps demand-backup-haproxy SuccessfulDelete delete Pod demand-backup-haproxy-0 in StatefulSet demand-backup-haproxy successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:17:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:17:12 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulDelete delete Pod demand-backup-mysql-1 in StatefulSet demand-backup-mysql successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:17:21 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:17:21 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:17:21 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulDelete delete Pod demand-backup-orc-1 in StatefulSet demand-backup-orc successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:17:31 +0000 UTC Warning Service demand-backup-haproxy ProcessServiceFailed error processing service "kuttl-test-hardy-killdeer/demand-backup-haproxy": services "demand-backup-haproxy" not found logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:17:33 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:17:33 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:17:33 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:17:33 +0000 UTC Normal StatefulSet.apps demand-backup-mysql SuccessfulDelete delete Pod demand-backup-mysql-0 in StatefulSet demand-backup-mysql successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:17:53 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:17:53 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:17:53 +0000 UTC Normal StatefulSet.apps demand-backup-orc SuccessfulDelete delete Pod demand-backup-orc-0 in StatefulSet demand-backup-orc successful logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:25 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-7rc2b Scheduled Successfully assigned kuttl-test-hardy-killdeer/xb-restore-demand-backup-restore-minio-7rc2b to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:25 +0000 UTC Normal Job.batch 
xb-restore-demand-backup-restore-minio SuccessfulCreate Created pod: xb-restore-demand-backup-restore-minio-7rc2b logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:32 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-7rc2b SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-134dc26b-63a0-4fcd-b083-43d96f1b5bf7" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:33 +0000 UTC Warning Service demand-backup-haproxy ProcessServiceFailed error processing service "kuttl-test-hardy-killdeer/demand-backup-haproxy": services "demand-backup-haproxy" not found logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:34 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-7rc2b.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:34 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-7rc2b.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 406.414808ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:34 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-7rc2b.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:34 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-7rc2b.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:36 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-7rc2b.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:36 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-7rc2b.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 399.598742ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:36 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-7rc2b.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:36 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-7rc2b.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:46 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio Completed Job completed logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:49 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:49 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:49 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:50 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 404.402509ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:50 +0000 UTC Normal 
Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:50 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:52 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:52 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 374.885686ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:52 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:52 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:52 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:53 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 362.898646ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:53 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:18:53 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:00 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-134dc26b-63a0-4fcd-b083-43d96f1b5bf7" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:01 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:02 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 422.686155ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:02 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:02 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 378.903985ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started 
container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:04 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:05 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 359.123777ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:05 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:05 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:05 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:05 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 345.484908ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:05 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:05 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:24 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:25 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:25 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 564.767117ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:25 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:26 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:27 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:27 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 426.765713ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:27 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:27 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:27 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image 
"perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:28 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 373.851414ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:28 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:28 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:36 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:42 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:43 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:43 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 395.999696ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:43 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:43 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:44 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d530ed39-6d4a-4011-82b7-815cc42c282b" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:45 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:45 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 400.067682ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:45 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:45 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:45 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:45 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:46 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image 
"perconalab/percona-server-mysql-operator:main-haproxy" in 389.172937ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:46 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:46 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:46 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 473.219312ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:46 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:46 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:47 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 379.543297ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:48 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:48 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 521.847331ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:48 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:48 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:48 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 401.824533ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:48 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:48 +0000 UTC Normal Pod 
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:48 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:49 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 717.183373ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:49 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:49 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:50 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:50 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 383.842978ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:50 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:50 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:50 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:51 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 363.513173ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:51 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:51 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:51 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:52 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:52 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 404.844336ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:52 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:52 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:54 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:54 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 366.306226ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:54 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:54 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:54 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:55 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 375.952997ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:55 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:19:55 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:00 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:00 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:01 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 382.288255ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:01 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:01 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:03 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:03 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 389.84093ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:03 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:03 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:03 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:04 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 378.487617ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:04 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:04 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:06 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed:
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:06 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:10 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 375.76666ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:40 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:48 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-f5e13a5c-dd64-4dd9-a75a-7375fa777d5c"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:49 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:50 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 495.386284ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:50 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:50 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:51 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:51 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 414.637705ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:51 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:51 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:52 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:52 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 367.2416ms
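
The Unhealthy/Killing pair at 14:20:06 is the kubelet restarting demand-backup-mysql-1's mysql container after a failed startup probe, likely because the replica was still provisioning its data on first start; the test tolerates this as long as the pod eventually goes Ready, and the same pattern repeats for demand-backup-mysql-2 just below. The event carries no probe output, so the useful diagnostics live on the pod itself, e.g.:

    # probe settings and full event history for the restarted pod
    kubectl -n kuttl-test-hardy-killdeer describe pod demand-backup-mysql-1
    # log of the mysql container instance that the probe killed
    kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-1 -c mysql --previous
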
"perconalab/percona-server-mysql-operator:main-backup" in 367.2416ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:52 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:52 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:52 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:52 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 330.691723ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:52 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:20:52 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:21:10 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:21:10 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:21:14 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 370.375413ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:26 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:26 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:26 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:26 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:26 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:26 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:26 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:28 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:28 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:29 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:29 +0000 UTC Normal Pod 
demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:30 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:30 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:30 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:35 +0000 UTC Warning Service demand-backup-haproxy ProcessServiceFailed error processing service "kuttl-test-hardy-killdeer/demand-backup-haproxy": services "demand-backup-haproxy" not found logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:35 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:35 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:35 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:36 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2023/08/17 14:23:36 readiness check failed: connect to db: ping database: dial tcp 10.16.90.12:33062: connect: connection refused logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:41 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:57 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:23:57 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:24:05 +0000 UTC Warning Service demand-backup-haproxy ProcessServiceFailed error processing service "kuttl-test-hardy-killdeer/demand-backup-haproxy": services "demand-backup-haproxy" not found logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:24:28 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:24:28 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:24:29 +0000 UTC Warning Service demand-backup-haproxy ProcessServiceFailed error processing service "kuttl-test-hardy-killdeer/demand-backup-haproxy": services "demand-backup-haproxy" not found logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:24:59 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-kcnjd Scheduled Successfully assigned kuttl-test-hardy-killdeer/xb-restore-demand-backup-restore-minio-backup-source-kcnjd to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:24:59 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio-backup-source SuccessfulCreate Created pod: 
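
At this point the operator has stopped the entire cluster (the Killing events above) and launched the restore Job; the recurring ProcessServiceFailed warnings are expected noise while the demand-backup-haproxy Service is absent during the restore. The Job is driven by a PerconaServerMySQLRestore resource, and in this "backup-source" variant the restore points at an explicit backupSource rather than at a named PerconaServerMySQLBackup object. A minimal sketch with illustrative values follows; the destination path, bucket, and endpoint are assumptions based on the suite's MinIO conventions, not taken from this log, so check the step files under e2e-tests/tests/demand-backup for the exact manifest:

    kubectl -n kuttl-test-hardy-killdeer apply -f - <<EOF
    apiVersion: ps.percona.com/v1alpha1   # apiVersion/kind per this branch's CRDs
    kind: PerconaServerMySQLRestore
    metadata:
      name: demand-backup-restore-minio-backup-source
    spec:
      clusterName: demand-backup
      backupSource:
        destination: s3://operator-testing/demand-backup-full   # illustrative
        storage:
          type: s3
          s3:
            bucket: operator-testing                            # illustrative
            credentialsSecret: minio-secret
            endpointUrl: http://minio-service:9000              # illustrative
            region: us-east-1
    EOF
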
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:04 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-kcnjd SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-134dc26b-63a0-4fcd-b083-43d96f1b5bf7"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:08 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-kcnjd.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:08 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-kcnjd.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 388.303526ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:08 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-kcnjd.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:08 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-kcnjd.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:10 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-kcnjd.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:10 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-kcnjd.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 398.197835ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:10 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-kcnjd.spec.containers{xtrabackup} Created Created container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:10 +0000 UTC Normal Pod xb-restore-demand-backup-restore-minio-backup-source-kcnjd.spec.containers{xtrabackup} Started Started container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:20 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-minio-backup-source Completed Job completed
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:24 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:24 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:25 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:25 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 429.781131ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:25 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:25 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:27 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:27 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 402.937691ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:27 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:27 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:27 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 367.634149ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:28 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:28 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 351.976513ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:28 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:28 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:30 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:30 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 386.563587ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:30 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:30 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:30 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 368.571417ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 387.460096ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:31 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:25:59 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:00 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:00 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 418.934449ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:00 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:00 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:02 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:02 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:02 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 427.46612ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:02 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:03 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:03 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:03 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 386.599554ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:03 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:03 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:05 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:06 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:07 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 399.728858ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:07 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:07 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:08 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:08 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 370.37543ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:08 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:08 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:09 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:09 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 369.21707ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:09 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:09 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:09 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:10 +0000 UTC Warning Pod demand-backup-haproxy-1 FailedMount MountVolume.SetUp failed for volume "config" : failed to sync configmap cache: timed out waiting for the condition
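
The FailedMount warning above is a transient kubelet race, not a test failure: the pod's "config" volume references a ConfigMap that had only just been (re)created, and the kubelet's ConfigMap cache missed its sync deadline once. The mount is retried automatically, and the haproxy-init events that follow show the pod starting normally; demand-backup-orc-2 hits the same one-off warning further down. If such a warning persisted, the first check would be whether the referenced ConfigMap actually exists:

    # resolve which ConfigMap backs the "config" volume, then confirm it exists
    kubectl -n kuttl-test-hardy-killdeer get pod demand-backup-haproxy-1 \
      -o jsonpath='{.spec.volumes[?(@.name=="config")].configMap.name}{"\n"}'
    kubectl -n kuttl-test-hardy-killdeer get configmap <name-from-previous-command>
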
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:10 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d530ed39-6d4a-4011-82b7-815cc42c282b"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:11 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:12 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 412.294403ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:12 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:12 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 596.790672ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:14 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:14 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 373.268528ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:14 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:14 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:15 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:15 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:15 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 635.894836ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:15 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:15 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:15 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 725.848394ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:15 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:15 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:15 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:16 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:16 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 640.78294ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:16 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:16 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:16 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:16 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 373.47806ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:16 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:16 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:17 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:17 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 389.317034ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:17 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:18 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:19 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:19 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 409.352731ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:19 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:19 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:19 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 371.593345ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:20 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:32 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed:
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:35 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:36 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 368.362324ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:36 +0000 UTC Warning Pod demand-backup-orc-2 FailedMount MountVolume.SetUp failed for volume "config" : failed to sync configmap cache: timed out waiting for the condition
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:37 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:37 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 410.309094ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:37 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:37 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:39 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:39 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 372.033582ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:39 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:39 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:39 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:40 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 382.594286ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:40 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:26:40 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:06 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:14 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-f5e13a5c-dd64-4dd9-a75a-7375fa777d5c"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:17 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:18 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 424.011542ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:18 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:18 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:19 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:19 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 347.154567ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:19 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 446.615703ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:20 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:21 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 404.363783ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:21 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:21 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:38 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed:
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:27:42 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 404.993549ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:28:39 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-fk6hj Scheduled Successfully assigned kuttl-test-hardy-killdeer/xb-demand-backup-s3-aws-s3-fk6hj to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:28:39 +0000 UTC Normal Job.batch xb-demand-backup-s3-aws-s3 SuccessfulCreate Created pod: xb-demand-backup-s3-aws-s3-fk6hj
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:28:40 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-fk6hj.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:28:40 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-fk6hj.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 420.403439ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:28:40 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-fk6hj.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:28:41 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-fk6hj.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:28:42 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-fk6hj.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:28:43 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-fk6hj.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 408.204249ms
"perconalab/percona-server-mysql-operator:main-backup" in 408.204249ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:28:43 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-fk6hj.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:28:43 +0000 UTC Normal Pod xb-demand-backup-s3-aws-s3-fk6hj.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:28:50 +0000 UTC Normal Job.batch xb-demand-backup-s3-aws-s3 Completed Job completed logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:27 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:27 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:27 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:27 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:27 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:28 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:28 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:29 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:29 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:31 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:31 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:31 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:35 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:35 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:35 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:37 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} 
Unhealthy Readiness probe failed: 2023/08/17 14:29:37 readiness check failed: connect to db: ping database: dial tcp 10.16.90.16:33062: connect: connection refused logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:42 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:58 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:29:58 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:30:29 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:30:29 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:30:59 +0000 UTC Warning Pod demand-backup-orc-0.spec.containers{orc} Unhealthy Readiness probe failed: Get "http://10.16.88.10:3000/api/health": dial tcp 10.16.88.10:3000: connect: connection refused logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:01 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-c7t4c Scheduled Successfully assigned kuttl-test-hardy-killdeer/xb-restore-demand-backup-restore-s3-c7t4c to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:01 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-s3 SuccessfulCreate Created pod: xb-restore-demand-backup-restore-s3-c7t4c logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:10 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-c7t4c SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-134dc26b-63a0-4fcd-b083-43d96f1b5bf7" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:11 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-c7t4c.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:12 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-c7t4c.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 402.304083ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:12 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-c7t4c.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:12 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-c7t4c.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:13 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-c7t4c.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:13 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-c7t4c.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 390.276447ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:13 +0000 UTC Normal Pod 
xb-restore-demand-backup-restore-s3-c7t4c.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:13 +0000 UTC Normal Pod xb-restore-demand-backup-restore-s3-c7t4c.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:26 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-s3 Completed Job completed logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:30 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:30 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:31 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:31 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 394.745618ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:31 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:31 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 421.840523ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:33 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:34 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 399.933286ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:34 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:34 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:37 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-134dc26b-63a0-4fcd-b083-43d96f1b5bf7" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:39 +0000 UTC Normal Pod 
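
Note which volume the restore ran on: pvc-134dc26b-63a0-4fcd-b083-43d96f1b5bf7 is demand-backup-mysql-0's data volume; it was attached to both restore pods while the cluster was stopped and is re-attached to demand-backup-mysql-0 at 14:31:37. In other words, the restore Job writes the backup directly into the existing datadir instead of provisioning a fresh volume. If a restore misbehaves, the Job's pod log is the primary evidence while the Job still exists:

    kubectl -n kuttl-test-hardy-killdeer get jobs
    kubectl -n kuttl-test-hardy-killdeer logs job/xb-restore-demand-backup-restore-s3 -c xtrabackup
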
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 340.772592ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:39 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:41 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 432.854601ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 381.405174ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 369.285993ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:42 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:31:43 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:05 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:06 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:07 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 388.956227ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:07 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:07 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:08 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:08 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 347.317094ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:09 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:09 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:09 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:09 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 352.91501ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:09 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:09 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:13 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:16 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:17 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:17 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 392.973711ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:17 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:17 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:18 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d530ed39-6d4a-4011-82b7-815cc42c282b"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:19 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 372.719285ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 364.828279ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:20 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:20 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:21 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 389.637316ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:22 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:23 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 380.80183ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:23 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:24 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:24 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:24 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:24 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 696.288278ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:24 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:24 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 587.124365ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:24 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:24 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:25 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:25 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:25 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:26 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:26 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 374.753576ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:26 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:26 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:26 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:26 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 404.991178ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:26 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:26 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:26 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:26 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 337.948905ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:26 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 747.906838ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:27 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:27 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:28 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:28 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 390.178875ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:28 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:28 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:28 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:29 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 362.529677ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:29 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:29 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:41 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
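
All three components are now rolling out in parallel on the three nodes. Assuming the operator names its StatefulSets after the cluster and component, as the pod names demand-backup-mysql-N / demand-backup-haproxy-N / demand-backup-orc-N suggest, a rollout check outside the test harness might look like:

    for sts in demand-backup-mysql demand-backup-haproxy demand-backup-orc; do
      kubectl -n kuttl-test-hardy-killdeer rollout status "statefulset/$sts" --timeout=300s
    done
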
"perconalab/percona-server-mysql-operator:PR-424-70568ae" in 373.227749ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:44 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:44 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:44 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:44 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 424.25822ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:44 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:45 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:45 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:45 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 752.719354ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:45 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:45 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:32:48 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 371.587839ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:18 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:22 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-f5e13a5c-dd64-4dd9-a75a-7375fa777d5c" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:27 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:27 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 412.875046ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:27 +0000 UTC Normal Pod 
demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:27 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:29 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:30 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 402.290505ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:30 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:30 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:30 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:30 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 377.015545ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:30 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:30 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:30 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:31 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 414.20349ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:31 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:31 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:47 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:48 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:33:51 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 395.975429ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:34:55 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-wd5zp Scheduled Successfully assigned kuttl-test-hardy-killdeer/xb-demand-backup-gcp-gcp-cs-wd5zp to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:34:55 +0000 UTC Normal Pod 
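
The empty "Startup probe failed:" message followed by one restart shows up once per replica (mysql-1 above, mysql-2 here) and appears benign in this run: the replica is most likely still initializing its data and replication state, so mysqld is not yet answering when the probe first fires, kubelet restarts the container, and the second attempt passes. To see what a restarted container was doing, one would typically check the previous instance's logs (a generic sketch, not a test step):

    kubectl -n kuttl-test-hardy-killdeer logs demand-backup-mysql-2 -c mysql --previous | tail -n 50
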
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:34:55 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-wd5zp.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:34:55 +0000 UTC Normal Job.batch xb-demand-backup-gcp-gcp-cs SuccessfulCreate Created pod: xb-demand-backup-gcp-gcp-cs-wd5zp
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:34:56 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-wd5zp.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 407.030267ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:34:56 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-wd5zp.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:34:56 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-wd5zp.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:34:58 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-wd5zp.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:34:58 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-wd5zp.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 377.474481ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:34:58 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-wd5zp.spec.containers{xtrabackup} Created Created container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:34:58 +0000 UTC Normal Pod xb-demand-backup-gcp-gcp-cs-wd5zp.spec.containers{xtrabackup} Started Started container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:05 +0000 UTC Normal Job.batch xb-demand-backup-gcp-gcp-cs Completed Job completed
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:42 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:42 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:42 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:42 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:43 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:43 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:44 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:44 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:45 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:45 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:47 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:48 +0000 UTC Warning Service demand-backup-haproxy ProcessServiceFailed error processing service "kuttl-test-hardy-killdeer/demand-backup-haproxy": services "demand-backup-haproxy" not found
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:52 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:53 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2023/08/17 14:35:53 readiness check failed: connect to db: ping database: dial tcp 10.16.90.21:33062: connect: connection refused
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:35:58 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:36:13 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:36:13 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:36:44 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:36:44 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:14 +0000 UTC Warning Service demand-backup-haproxy ProcessServiceFailed error processing service "kuttl-test-hardy-killdeer/demand-backup-haproxy": services "demand-backup-haproxy" not found
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:17 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-fb2wq Scheduled Successfully assigned kuttl-test-hardy-killdeer/xb-restore-demand-backup-restore-gcp-fb2wq to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:17 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-gcp SuccessfulCreate Created pod: xb-restore-demand-backup-restore-gcp-fb2wq
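
The ProcessServiceFailed warnings are a side effect of the teardown order: the demand-backup-haproxy Service has already been deleted when a queued service-processing item for it is handled, so the "not found" error is transient and clears once the cluster is recreated after the restore. A simple way to watch the Service reappear during the redeploy (sketch):

    kubectl -n kuttl-test-hardy-killdeer get svc demand-backup-haproxy -w
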
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:22 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-fb2wq SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-134dc26b-63a0-4fcd-b083-43d96f1b5bf7"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:23 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-fb2wq.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:24 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-fb2wq.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 423.340839ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:24 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-fb2wq.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:24 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-fb2wq.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:25 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-fb2wq.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:26 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-fb2wq.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 391.327249ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:26 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-fb2wq.spec.containers{xtrabackup} Created Created container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:26 +0000 UTC Normal Pod xb-restore-demand-backup-restore-gcp-fb2wq.spec.containers{xtrabackup} Started Started container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:38 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-gcp Completed Job completed
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:41 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:41 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:42 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:43 +0000 UTC Warning Pod demand-backup-mysql-0 FailedMount MountVolume.SetUp failed for volume "users" : failed to sync secret cache: timed out waiting for the condition
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:43 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 392.456742ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:43 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:43 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 413.976036ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:44 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:44 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:44 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 346.470267ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:44 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:44 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:44 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 463.968334ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:45 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:46 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:47 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 397.777094ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:47 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:47 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:47 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup"
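
The FailedMount warning on demand-backup-mysql-0 ("failed to sync secret cache: timed out waiting for the condition") is a transient race in kubelet's secret cache right after pod creation; the mount is retried automatically and, as the following events show, the init container runs normally. If it persisted, the first thing to check would be the pod's event list (sketch):

    kubectl -n kuttl-test-hardy-killdeer describe pod demand-backup-mysql-0 | grep -A10 'Events:'
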
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:47 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 324.519616ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:47 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:47 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:47 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 374.816279ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:37:48 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:17 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:18 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:18 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 404.854194ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:18 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:18 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:19 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:19 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:20 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 358.593059ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:20 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:20 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:20 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:20 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 499.431176ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:20 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:20 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:23 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:23 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:24 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 357.334277ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:24 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:24 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:26 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:26 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 367.43054ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:26 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:26 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:26 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:26 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 364.071722ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:26 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:26 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:26 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:27 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:27 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d530ed39-6d4a-4011-82b7-815cc42c282b"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:28 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 411.165445ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:28 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:28 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:29 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:29 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:30 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 416.197644ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:30 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:30 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:30 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy"
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:30 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 634.668464ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:30 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:30 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:30 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 646.823348ms
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:30 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:30 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container mysql-init
logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:31 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg
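
All of the image pulls in this redeploy complete in roughly 300-700ms because the layers are already cached on the three nodes from earlier steps. The pull latency is recorded in the Pulled event messages themselves, so a rough overview can be extracted with (sketch):

    kubectl -n kuttl-test-hardy-killdeer get events --field-selector reason=Pulled \
      -o custom-columns=TIME:.lastTimestamp,MESSAGE:.message
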
Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:32 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:32 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 370.497076ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:32 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 378.223322ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 383.61134ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:32 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:33 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:33 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 387.540404ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:33 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:33 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:34 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:34 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 366.754561ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:34 +0000 UTC Normal Pod 
demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:35 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:35 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:35 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 369.581688ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:35 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:35 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:49 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:49 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:52 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:53 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:54 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 603.53212ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:54 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 400.113305ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:54 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:54 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:55 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 386.705882ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image 
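
mysql-1 hits the same single startup-probe restart here as in the S3 cycle, so this is the recurring per-replica pattern of the test rather than a regression. Restart counts per pod would confirm it stays at one (sketch):

    kubectl -n kuttl-test-hardy-killdeer get pods \
      -o custom-columns=NAME:.metadata.name,RESTARTS:.status.containerStatuses[*].restartCount
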
"perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 404.498102ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:38:56 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:29 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:36 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-f5e13a5c-dd64-4dd9-a75a-7375fa777d5c" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 403.780277ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:38 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:39 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:40 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 377.72201ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:40 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:40 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:40 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:40 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 396.511893ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:40 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:40 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:40 +0000 UTC Normal Pod 
demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:41 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 384.481292ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:41 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:41 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:58 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:39:58 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:40:02 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 394.665876ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:40:58 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-c2p7z Scheduled Successfully assigned kuttl-test-hardy-killdeer/xb-demand-backup-azure-azure-blob-c2p7z to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:40:58 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-c2p7z.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:40:58 +0000 UTC Normal Job.batch xb-demand-backup-azure-azure-blob SuccessfulCreate Created pod: xb-demand-backup-azure-azure-blob-c2p7z logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:40:59 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-c2p7z.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 415.53489ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:40:59 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-c2p7z.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:40:59 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-c2p7z.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:01 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-c2p7z.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:01 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-c2p7z.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 406.254233ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:01 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-c2p7z.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:01 +0000 UTC Normal Pod xb-demand-backup-azure-azure-blob-c2p7z.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | 
demand-backup | 2023-08-17 14:41:08 +0000 UTC Normal Job.batch xb-demand-backup-azure-azure-blob Completed Job completed logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:49 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Killing Stopping container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:49 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:49 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Killing Stopping container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:49 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Stopping container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:49 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:49 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Killing Stopping container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:49 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:50 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Killing Stopping container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:50 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:51 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Killing Stopping container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:51 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Killing Stopping container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Stopping container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:53 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:53 +0000 UTC Warning Endpoints demand-backup-mysql-unready FailedToUpdateEndpoint Failed to update endpoint kuttl-test-hardy-killdeer/demand-backup-mysql-unready: Operation cannot be fulfilled on endpoints "demand-backup-mysql-unready": the object has been modified; please apply your changes to the latest version and try again logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Killing Stopping container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Killing Stopping container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:57 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Killing Stopping container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:41:58 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe failed: 2023/08/17 14:41:58 
readiness check failed: connect to db: ping database: dial tcp 10.16.90.26:33062: connect: connection refused logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:42:03 +0000 UTC Warning Pod demand-backup-mysql-0.spec.containers{mysql} Unhealthy Readiness probe errored: rpc error: code = Unknown desc = failed to exec in container: container is in CONTAINER_EXITED state logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:42:20 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Killing Stopping container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:42:20 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:42:51 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Killing Stopping container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:42:51 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Killing Stopping container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:42:55 +0000 UTC Warning Service demand-backup-haproxy ProcessServiceFailed error processing service "kuttl-test-hardy-killdeer/demand-backup-haproxy": services "demand-backup-haproxy" not found logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:25 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-jclfq Scheduled Successfully assigned kuttl-test-hardy-killdeer/xb-restore-demand-backup-restore-azure-jclfq to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:25 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-azure SuccessfulCreate Created pod: xb-restore-demand-backup-restore-azure-jclfq logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:33 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-jclfq SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-134dc26b-63a0-4fcd-b083-43d96f1b5bf7" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:34 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-jclfq.spec.initContainers{xtrabackup-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:35 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-jclfq.spec.initContainers{xtrabackup-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 405.567348ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:35 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-jclfq.spec.initContainers{xtrabackup-init} Created Created container xtrabackup-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:35 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-jclfq.spec.initContainers{xtrabackup-init} Started Started container xtrabackup-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:36 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-jclfq.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:37 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-jclfq.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 359.468964ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:37 +0000 UTC Normal Pod 
xb-restore-demand-backup-restore-azure-jclfq.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:37 +0000 UTC Normal Pod xb-restore-demand-backup-restore-azure-jclfq.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:49 +0000 UTC Normal Job.batch xb-restore-demand-backup-restore-azure Completed Job completed logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:52 +0000 UTC Warning Service demand-backup-haproxy ProcessServiceFailed error processing service "kuttl-test-hardy-killdeer/demand-backup-haproxy": services "demand-backup-haproxy" not found logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:57 +0000 UTC Normal Pod demand-backup-mysql-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:57 +0000 UTC Normal Pod demand-backup-orc-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:58 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:58 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 381.546745ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:58 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Created Created container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:43:58 +0000 UTC Normal Pod demand-backup-orc-0.spec.initContainers{orc-init} Started Started container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:00 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:00 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 369.086186ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:01 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Created Created container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:01 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{orc} Started Started container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:01 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:01 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 418.441961ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:01 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:01 +0000 UTC Normal Pod demand-backup-orc-0.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 
14:44:09 +0000 UTC Normal Pod demand-backup-mysql-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-134dc26b-63a0-4fcd-b083-43d96f1b5bf7" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:10 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:11 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 416.808958ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:11 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Created Created container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:11 +0000 UTC Normal Pod demand-backup-mysql-0.spec.initContainers{mysql-init} Started Started container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:13 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:13 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 366.391834ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:13 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Created Created container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:13 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{mysql} Started Started container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:13 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:14 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 359.575879ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:14 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:14 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:14 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:14 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 385.584637ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:14 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Created Created container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:14 +0000 UTC Normal Pod demand-backup-mysql-0.spec.containers{pt-heartbeat} Started Started container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:32 +0000 UTC Normal Pod demand-backup-orc-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 
14:47:33 | demand-backup | 2023-08-17 14:44:33 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:33 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 394.185778ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:34 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Created Created container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:34 +0000 UTC Normal Pod demand-backup-orc-1.spec.initContainers{orc-init} Started Started container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:35 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:35 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 376.285646ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:35 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Created Created container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:35 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{orc} Started Started container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:35 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:36 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 423.585077ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:36 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:36 +0000 UTC Normal Pod demand-backup-orc-1.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:45 +0000 UTC Normal Pod demand-backup-mysql-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:50 +0000 UTC Normal Pod demand-backup-mysql-1 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-d530ed39-6d4a-4011-82b7-815cc42c282b" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:52 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:52 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 446.143246ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:52 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Created Created container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:52 +0000 UTC Normal Pod demand-backup-mysql-1.spec.initContainers{mysql-init} Started Started container 
mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:53 +0000 UTC Normal Pod demand-backup-haproxy-0 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-0 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:54 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:54 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 441.446022ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:54 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Created Created container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:54 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.initContainers{haproxy-init} Started Started container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:54 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:55 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 383.297595ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:55 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Created Created container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:55 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Started Started container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:55 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:55 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 560.127724ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:55 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:55 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:55 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:56 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:56 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 435.829213ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:56 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{haproxy} Created Created container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:56 +0000 UTC Normal Pod 
demand-backup-haproxy-0.spec.containers{haproxy} Started Started container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:56 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 374.250048ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Created Created container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:56 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{pt-heartbeat} Started Started container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:57 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 391.937919ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:57 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:57 +0000 UTC Normal Pod demand-backup-haproxy-0.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:58 +0000 UTC Normal Pod demand-backup-haproxy-1 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-1 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-sp1d logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:59 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:59 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 485.370091ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:59 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Created Created container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:44:59 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.initContainers{haproxy-init} Started Started container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:01 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 465.84694ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Created Created container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{haproxy} Started Started container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | 
demand-backup | 2023-08-17 14:45:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 473.213701ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:02 +0000 UTC Normal Pod demand-backup-haproxy-1.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:03 +0000 UTC Normal Pod demand-backup-haproxy-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-haproxy-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:04 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:05 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 404.392133ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:05 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Created Created container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:05 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.initContainers{haproxy-init} Started Started container haproxy-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:06 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:07 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 413.621035ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:07 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Created Created container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:07 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{haproxy} Started Started container haproxy logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:07 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-haproxy" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:07 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-haproxy" in 350.055621ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:07 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:07 +0000 UTC Normal Pod demand-backup-haproxy-2.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:08 +0000 UTC Normal Pod demand-backup-orc-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-orc-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:09 +0000 UTC Normal Pod 
demand-backup-orc-2.spec.initContainers{orc-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:09 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 381.617786ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:09 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Created Created container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:09 +0000 UTC Normal Pod demand-backup-orc-2.spec.initContainers{orc-init} Started Started container orc-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:10 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 399.957537ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Created Created container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{orc} Started Started container orc logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-orchestrator" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-orchestrator" in 360.879186ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Created Created container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:11 +0000 UTC Normal Pod demand-backup-orc-2.spec.containers{mysql-monit} Started Started container mysql-monit logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:12 +0000 UTC Warning Pod demand-backup-mysql-1.spec.containers{mysql} Unhealthy Startup probe failed: logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:12 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:16 +0000 UTC Normal Pod demand-backup-mysql-1.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 359.596955ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:46 +0000 UTC Normal Pod demand-backup-mysql-2 Scheduled Successfully assigned kuttl-test-hardy-killdeer/demand-backup-mysql-2 to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-dqdg logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:55 +0000 UTC Normal Pod demand-backup-mysql-2 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-f5e13a5c-dd64-4dd9-a75a-7375fa777d5c" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:56 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulling Pulling image "perconalab/percona-server-mysql-operator:PR-424-70568ae" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:57 
+0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:PR-424-70568ae" in 402.632853ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:57 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Created Created container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:57 +0000 UTC Normal Pod demand-backup-mysql-2.spec.initContainers{mysql-init} Started Started container mysql-init logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:58 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-psmysql" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:59 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 381.82198ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:59 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Created Created container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:59 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Started Started container mysql logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:59 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-backup" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:59 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-backup" in 408.670823ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:59 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Created Created container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:59 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{xtrabackup} Started Started container xtrabackup logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:45:59 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulling Pulling image "perconalab/percona-server-mysql-operator:main-toolkit" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:46:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-toolkit" in 415.306253ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:46:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Created Created container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:46:00 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{pt-heartbeat} Started Started container pt-heartbeat logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:46:17 +0000 UTC Warning Pod demand-backup-mysql-2.spec.containers{mysql} Unhealthy Startup probe failed: logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:46:17 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Killing Container mysql failed startup probe, will be restarted logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:46:21 +0000 UTC Normal Pod demand-backup-mysql-2.spec.containers{mysql} Pulled Successfully pulled image "perconalab/percona-server-mysql-operator:main-psmysql" in 470.544299ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:47:25 +0000 UTC Normal Pod aws-cli Scheduled 
Successfully assigned kuttl-test-hardy-killdeer/aws-cli to gke-jen-ps-424-70568ae-7-default-pool-85f5277f-pl12 logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:47:26 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulling Pulling image "perconalab/awscli" logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:47:27 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Pulled Successfully pulled image "perconalab/awscli" in 351.265018ms logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:47:27 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Created Created container aws-cli logger.go:42: 14:47:33 | demand-backup | 2023-08-17 14:47:27 +0000 UTC Normal Pod aws-cli.spec.containers{aws-cli} Started Started container aws-cli logger.go:42: 14:47:34 | demand-backup | Deleting namespace: kuttl-test-hardy-killdeer === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (2512.81s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/demand-backup (2505.01s) PASS