=== RUN kuttl harness.go:464: starting setup harness.go:255: running tests using configured kubeconfig. harness.go:278: Successful connection to cluster at: https://34.121.116.227 harness.go:363: running tests harness.go:75: going to run test suite with timeout of 180 seconds for each step harness.go:375: testsuite: e2e-tests/tests has 22 tests === RUN kuttl/harness === RUN kuttl/harness/demand-backup === PAUSE kuttl/harness/demand-backup === CONT kuttl/harness/demand-backup logger.go:42: 16:25:53 | demand-backup | Creating namespace: kuttl-test-workable-tapir logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | starting test step 0-deploy-operator logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | running command: [sh -c set -o errexit set -o xtrace source ../../functions init_temp_dir # do this only in the first TestStep deploy_operator deploy_client deploy_s3_secrets] logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | + source ../../functions logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ realpath ../../.. logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | ++ CERT_MANAGER_VER=1.15.3 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | ++++ pwd logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/tests/demand-backup logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | ++ test_name=demand-backup logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/vars.sh logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export GIT_BRANCH=PR-1083 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ GIT_BRANCH=PR-1083 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export VERSION=PR-1083-44dc2e618 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ VERSION=PR-1083-44dc2e618 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export 
IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export PG_VER=17 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ PG_VER=17 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export BUCKET=pg-operator-testing logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ BUCKET=pg-operator-testing logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export PGOV1_TAG=1.4.0 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ PGOV1_TAG=1.4.0 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ export PGOV1_VER=14 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ PGOV1_VER=14 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | ++++ which gdate logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | ++++ which date logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ date=/usr/bin/date logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | ++++ which gsed logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:25:54 | 
demand-backup/0-deploy-operator | ++++ which sed logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ sed=/usr/bin/sed logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | +++ command -v oc logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | ++ oc get projects logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | + init_temp_dir logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | + rm -rf /tmp/kuttl/pg/demand-backup logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | + mkdir -p /tmp/kuttl/pg/demand-backup logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | + deploy_operator logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | + local cw_prefix= logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | + destroy_operator logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | + kubectl -n pg-operator delete deployment percona-postgresql-operator --force --grace-period=0 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | deployment.apps "percona-postgresql-operator" force deleted logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | + [[ -n pg-operator ]] logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | + kubectl delete namespace pg-operator --force --grace-period=0 logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 16:25:54 | demand-backup/0-deploy-operator | namespace "pg-operator" force deleted logger.go:42: 16:26:00 | demand-backup/0-deploy-operator | + [[ -n pg-operator ]] logger.go:42: 16:26:00 | demand-backup/0-deploy-operator | + create_namespace pg-operator logger.go:42: 16:26:00 | demand-backup/0-deploy-operator | + local namespace=pg-operator logger.go:42: 16:26:00 | demand-backup/0-deploy-operator | + [[ -n '' ]] logger.go:42: 16:26:00 | demand-backup/0-deploy-operator | + kubectl delete namespace pg-operator --ignore-not-found logger.go:42: 16:26:01 | demand-backup/0-deploy-operator | + kubectl wait --for=delete namespace pg-operator logger.go:42: 16:26:01 | demand-backup/0-deploy-operator | + kubectl create namespace pg-operator logger.go:42: 16:26:02 | demand-backup/0-deploy-operator | namespace/pg-operator created logger.go:42: 16:26:02 | demand-backup/0-deploy-operator | + cw_prefix=cw- logger.go:42: 16:26:02 | demand-backup/0-deploy-operator | + kubectl -n pg-operator apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy/crd.yaml logger.go:42: 16:26:03 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/crunchybridgeclusters.postgres-operator.crunchydata.com serverside-applied logger.go:42: 16:26:03 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconapgbackups.pgv2.percona.com serverside-applied logger.go:42: 16:26:05 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconapgclusters.pgv2.percona.com serverside-applied logger.go:42: 16:26:05 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconapgrestores.pgv2.percona.com serverside-applied logger.go:42: 16:26:06 | demand-backup/0-deploy-operator | 
customresourcedefinition.apiextensions.k8s.io/perconapgupgrades.pgv2.percona.com serverside-applied logger.go:42: 16:26:06 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/pgadmins.postgres-operator.crunchydata.com serverside-applied logger.go:42: 16:26:06 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/pgupgrades.postgres-operator.crunchydata.com serverside-applied logger.go:42: 16:26:09 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/postgresclusters.postgres-operator.crunchydata.com serverside-applied logger.go:42: 16:26:09 | demand-backup/0-deploy-operator | + kubectl -n pg-operator apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy/cw-rbac.yaml logger.go:42: 16:26:09 | demand-backup/0-deploy-operator | serviceaccount/percona-postgresql-operator serverside-applied logger.go:42: 16:26:09 | demand-backup/0-deploy-operator | clusterrole.rbac.authorization.k8s.io/percona-postgresql-operator serverside-applied logger.go:42: 16:26:09 | demand-backup/0-deploy-operator | clusterrolebinding.rbac.authorization.k8s.io/percona-postgresql-operator serverside-applied logger.go:42: 16:26:09 | demand-backup/0-deploy-operator | + local disable_telemetry=true logger.go:42: 16:26:09 | demand-backup/0-deploy-operator | + '[' demand-backup == telemetry-transfer ']' logger.go:42: 16:26:09 | demand-backup/0-deploy-operator | + yq eval '.spec.template.spec.containers[0].image = "perconalab/percona-postgresql-operator:PR-1083-44dc2e618"' /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy/cw-operator.yaml logger.go:42: 16:26:09 | demand-backup/0-deploy-operator | + yq eval '(.spec.template.spec.containers[] | select(.name=="operator") | .env[] | select(.name=="DISABLE_TELEMETRY") | .value) = "true"' - logger.go:42: 16:26:09 | demand-backup/0-deploy-operator | + kubectl -n pg-operator apply -f - logger.go:42: 16:26:11 | demand-backup/0-deploy-operator | deployment.apps/percona-postgresql-operator created logger.go:42: 16:26:11 | demand-backup/0-deploy-operator | + deploy_client logger.go:42: 16:26:11 | demand-backup/0-deploy-operator | + kubectl -n kuttl-test-workable-tapir apply -f /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf/client.yaml logger.go:42: 16:26:11 | demand-backup/0-deploy-operator | deployment.apps/pg-client created logger.go:42: 16:26:11 | demand-backup/0-deploy-operator | + deploy_s3_secrets logger.go:42: 16:26:11 | demand-backup/0-deploy-operator | + set +o xtrace logger.go:42: 16:26:12 | demand-backup/0-deploy-operator | secret/demand-backup-pgbackrest-secrets created logger.go:42: 16:26:12 | demand-backup/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-postgresql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 16:26:12 | demand-backup/0-deploy-operator | ASSERT deployment percona-postgresql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 16:26:13 | demand-backup/0-deploy-operator | ASSERT FAIL Resource(s) not found. logger.go:42: 16:26:14 | demand-backup/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-postgresql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 16:26:14 | demand-backup/0-deploy-operator | ASSERT deployment percona-postgresql-operator matching field criteria 'status.readyReplicas=1' should exist. 
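The readiness check above retries "kubectl assert exist-enhanced" (from the kubectl-assert plugin) until the operator Deployment reports status.readyReplicas=1. A minimal sketch of an equivalent wait using only plain kubectl, assuming the same pg-operator namespace and deployment name as in this run, could look like this; it is not part of the test suite:

# Sketch only: block until the operator Deployment is ready, which is what the
# assert retry loop in this log verifies one attempt at a time.
kubectl -n pg-operator rollout status deployment/percona-postgresql-operator --timeout=120s
# Or wait directly on the field the test asserts on:
kubectl -n pg-operator wait deployment/percona-postgresql-operator \
  --for=jsonpath='{.status.readyReplicas}'=1 --timeout=120s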
logger.go:42: 16:26:15 | demand-backup/0-deploy-operator | ASSERT FAIL Resource(s) not found. logger.go:42: 16:26:16 | demand-backup/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-postgresql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 16:26:16 | demand-backup/0-deploy-operator | ASSERT deployment percona-postgresql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 16:26:16 | demand-backup/0-deploy-operator | ASSERT FAIL Resource(s) not found. logger.go:42: 16:26:18 | demand-backup/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-postgresql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 16:26:18 | demand-backup/0-deploy-operator | ASSERT deployment percona-postgresql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 16:26:18 | demand-backup/0-deploy-operator | INFO Found 1 resource(s). logger.go:42: 16:26:18 | demand-backup/0-deploy-operator | NAME NAMESPACE COL0 logger.go:42: 16:26:18 | demand-backup/0-deploy-operator | percona-postgresql-operator pg-operator 1 logger.go:42: 16:26:18 | demand-backup/0-deploy-operator | ASSERT PASS logger.go:42: 16:26:18 | demand-backup/0-deploy-operator | test step completed 0-deploy-operator logger.go:42: 16:26:18 | demand-backup/1-create-cluster | starting test step 1-create-cluster logger.go:42: 16:26:18 | demand-backup/1-create-cluster | running command: [sh -c set -o errexit set -o xtrace source ../../functions get_cr "demand-backup" ${RANDOM} \ | yq '.metadata.finalizers=["percona.com/delete-backups"]' \ | yq '.spec.backups.pgbackrest.global.repo1-retention-full="2"' \ | yq '.spec.backups.pgbackrest.global.repo1-retention-full-type="count"' \ | yq '.spec.backups.pgbackrest.global.repo3-retention-full="2"' \ | yq '.spec.backups.pgbackrest.global.repo3-retention-full-type="count"' \ | yq '.spec.backups.pgbackrest.jobs.backoffLimit=20' \ | yq '.spec.backups.pgbackrest.jobs.restartPolicy="OnFailure"' \ | kubectl -n "${NAMESPACE}" apply -f -] logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + source ../../functions logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ realpath ../../.. 
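Step 1-create-cluster builds the PerconaPGCluster manifest by piping the get_cr output through a chain of yq edits and applying the result. A condensed sketch of that pipeline, keeping only a few of the edits shown in the step command above (get_cr is a helper sourced from ../../functions):

# Sketch: each yq stage sets one field on the manifest flowing through the pipe;
# the final stage applies it to the test namespace.
get_cr "demand-backup" "${RANDOM}" \
  | yq '.metadata.finalizers=["percona.com/delete-backups"]' \
  | yq '.spec.backups.pgbackrest.global.repo1-retention-full="2"' \
  | yq '.spec.backups.pgbackrest.jobs.backoffLimit=20' \
  | kubectl -n "${NAMESPACE}" apply -f -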
logger.go:42: 16:26:18 | demand-backup/1-create-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | ++ CERT_MANAGER_VER=1.15.3 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | ++++ pwd logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/tests/demand-backup logger.go:42: 16:26:18 | demand-backup/1-create-cluster | ++ test_name=demand-backup logger.go:42: 16:26:18 | demand-backup/1-create-cluster | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/vars.sh logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:26:18 | demand-backup/1-create-cluster | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export GIT_BRANCH=PR-1083 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ GIT_BRANCH=PR-1083 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export VERSION=PR-1083-44dc2e618 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ VERSION=PR-1083-44dc2e618 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export PG_VER=17 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ PG_VER=17 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ 
IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export BUCKET=pg-operator-testing logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ BUCKET=pg-operator-testing logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export PGOV1_TAG=1.4.0 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ PGOV1_TAG=1.4.0 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ export PGOV1_VER=14 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ PGOV1_VER=14 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | ++++ which gdate logger.go:42: 16:26:18 | demand-backup/1-create-cluster | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:26:18 | demand-backup/1-create-cluster | ++++ which date logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ date=/usr/bin/date logger.go:42: 16:26:18 | demand-backup/1-create-cluster | ++++ which gsed logger.go:42: 16:26:18 | demand-backup/1-create-cluster | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:26:18 | demand-backup/1-create-cluster | ++++ which sed logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ sed=/usr/bin/sed logger.go:42: 16:26:18 | demand-backup/1-create-cluster | +++ command -v oc logger.go:42: 16:26:18 | demand-backup/1-create-cluster | ++ oc get projects logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + get_cr demand-backup 7285 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + local cr_name=demand-backup logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + '[' -z demand-backup ']' logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + local repo_path=7285 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + yq '.metadata.finalizers=["percona.com/delete-backups"]' logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + yq eval ' logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .metadata.name = "demand-backup" | logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .metadata.labels = {"e2e":"demand-backup"} | logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .spec.postgresVersion = 17 | 
logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .spec.users += [{"name":"postgres","password":{"type":"AlphaNumeric"}}] | logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .spec.users += [{"name":"demand-backup","password":{"type":"AlphaNumeric"}}] | logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .spec.image = "perconalab/percona-postgresql-operator:main-ppg17-postgres" | logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.image = "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" | logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .spec.proxy.pgBouncer.image = "perconalab/percona-postgresql-operator:main-ppg17-pgbouncer" | logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .spec.pmm.image = "perconalab/pmm-client:dev-latest" | logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .spec.pmm.secret = "demand-backup-pmm-secret" logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + yq '.spec.backups.pgbackrest.global.repo1-retention-full="2"' logger.go:42: 16:26:18 | demand-backup/1-create-cluster | ' /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy/cr.yaml logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + yq '.spec.backups.pgbackrest.global.repo3-retention-full-type="count"' logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + yq .spec.backups.pgbackrest.jobs.backoffLimit=20 logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + kubectl -n kuttl-test-workable-tapir apply -f - logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + yq '.spec.backups.pgbackrest.jobs.restartPolicy="OnFailure"' logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + yq '.spec.backups.pgbackrest.global.repo1-retention-full-type="count"' logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + yq '.spec.backups.pgbackrest.global.repo3-retention-full="2"' logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + [[ -n '' ]] logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + case $test_name in logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + yq eval -i ' logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.configuration = [{"secret":{"name":"demand-backup-pgbackrest-secrets"}}] | logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.manual.repoName = "repo1" | logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.manual.options = ["--type=full"] | logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.global.repo1-path = "/backrestrepo/postgres-operator/7285/repo1" | logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.repos = [{"name":"repo1","s3":{"bucket":"pg-operator-testing","endpoint":"s3.amazonaws.com","region":"us-east-1"}}] logger.go:42: 16:26:18 | demand-backup/1-create-cluster | ' /tmp/kuttl/pg/demand-backup/cr.yaml logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + [[ demand-backup == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + yq eval -i ' logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.global.repo3-path = "/backrestrepo/postgres-operator/7285/repo3" | logger.go:42: 16:26:18 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.repos += [{"name":"repo3","azure":{"container":"pg-operator-testing"}}] logger.go:42: 16:26:18 | demand-backup/1-create-cluster | ' /tmp/kuttl/pg/demand-backup/cr.yaml logger.go:42: 
16:26:18 | demand-backup/1-create-cluster | + [[ demand-backup == \s\t\a\r\t\-\f\r\o\m\-\b\a\c\k\u\p ]] logger.go:42: 16:26:18 | demand-backup/1-create-cluster | + cat /tmp/kuttl/pg/demand-backup/cr.yaml logger.go:42: 16:26:19 | demand-backup/1-create-cluster | perconapgcluster.pgv2.percona.com/demand-backup created logger.go:42: 16:28:35 | demand-backup/1-create-cluster | test step completed 1-create-cluster logger.go:42: 16:28:35 | demand-backup/2-write-data | starting test step 2-write-data logger.go:42: 16:28:35 | demand-backup/2-write-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_psql_local \ 'CREATE DATABASE myapp; \c myapp \\\ CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);' \ "postgres:$(get_psql_user_pass demand-backup-pguser-postgres)@$(get_psql_user_host demand-backup-pguser-postgres)" run_psql_local \ '\c myapp \\\ INSERT INTO myApp (id) VALUES (100500)' \ "postgres:$(get_psql_user_pass demand-backup-pguser-postgres)@$(get_psql_user_host demand-backup-pguser-postgres)"] logger.go:42: 16:28:35 | demand-backup/2-write-data | + source ../../functions logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ realpath ../../.. logger.go:42: 16:28:35 | demand-backup/2-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:28:35 | demand-backup/2-write-data | ++ CERT_MANAGER_VER=1.15.3 logger.go:42: 16:28:35 | demand-backup/2-write-data | ++++ pwd logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/tests/demand-backup logger.go:42: 16:28:35 | demand-backup/2-write-data | ++ test_name=demand-backup logger.go:42: 16:28:35 | demand-backup/2-write-data | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/vars.sh logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:28:35 | demand-backup/2-write-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export GIT_BRANCH=PR-1083 logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ GIT_BRANCH=PR-1083 logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export VERSION=PR-1083-44dc2e618 logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ VERSION=PR-1083-44dc2e618 logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export 
IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export PG_VER=17 logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ PG_VER=17 logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export BUCKET=pg-operator-testing logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ BUCKET=pg-operator-testing logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export PGOV1_TAG=1.4.0 logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ PGOV1_TAG=1.4.0 logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ export PGOV1_VER=14 logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ PGOV1_VER=14 logger.go:42: 16:28:35 | demand-backup/2-write-data | ++++ which gdate logger.go:42: 16:28:35 | demand-backup/2-write-data | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:28:35 | demand-backup/2-write-data | ++++ which date logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ date=/usr/bin/date logger.go:42: 16:28:35 | demand-backup/2-write-data | ++++ which gsed logger.go:42: 16:28:35 | demand-backup/2-write-data | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:28:35 | demand-backup/2-write-data | ++++ which sed logger.go:42: 16:28:35 | demand-backup/2-write-data | +++ sed=/usr/bin/sed logger.go:42: 16:28:35 | 
demand-backup/2-write-data | +++ command -v oc logger.go:42: 16:28:35 | demand-backup/2-write-data | ++ oc get projects logger.go:42: 16:28:35 | demand-backup/2-write-data | ++ get_psql_user_pass demand-backup-pguser-postgres logger.go:42: 16:28:35 | demand-backup/2-write-data | ++ local secret_name=demand-backup-pguser-postgres logger.go:42: 16:28:35 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-workable-tapir get secret/demand-backup-pguser-postgres '--template={{.data.password | base64decode}}' logger.go:42: 16:28:36 | demand-backup/2-write-data | ++ get_psql_user_host demand-backup-pguser-postgres logger.go:42: 16:28:36 | demand-backup/2-write-data | ++ local secret_name=demand-backup-pguser-postgres logger.go:42: 16:28:36 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-workable-tapir get secret/demand-backup-pguser-postgres '--template={{.data.host | base64decode }}' logger.go:42: 16:28:36 | demand-backup/2-write-data | + run_psql_local 'CREATE DATABASE myapp; \c myapp \\\ CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);' postgres:gtBdMLv8DSx8510aU1tGV4l9@demand-backup-primary.kuttl-test-workable-tapir.svc logger.go:42: 16:28:36 | demand-backup/2-write-data | + local 'command=CREATE DATABASE myapp; \c myapp \\\ CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);' logger.go:42: 16:28:36 | demand-backup/2-write-data | + local uri=postgres:gtBdMLv8DSx8510aU1tGV4l9@demand-backup-primary.kuttl-test-workable-tapir.svc logger.go:42: 16:28:36 | demand-backup/2-write-data | + local driver=postgres logger.go:42: 16:28:36 | demand-backup/2-write-data | ++ get_client_pod logger.go:42: 16:28:36 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-workable-tapir get pods --selector=name=pg-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 16:28:36 | demand-backup/2-write-data | + kubectl -n kuttl-test-workable-tapir exec pg-client-6cc584874-x66rv -- bash -c 'printf '\''CREATE DATABASE myapp; \c myapp \\\ CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);\n'\'' | psql -v ON_ERROR_STOP=1 -t -q postgres://'\''postgres:gtBdMLv8DSx8510aU1tGV4l9@demand-backup-primary.kuttl-test-workable-tapir.svc'\''' logger.go:42: 16:28:38 | demand-backup/2-write-data | ++ get_psql_user_pass demand-backup-pguser-postgres logger.go:42: 16:28:38 | demand-backup/2-write-data | ++ local secret_name=demand-backup-pguser-postgres logger.go:42: 16:28:38 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-workable-tapir get secret/demand-backup-pguser-postgres '--template={{.data.password | base64decode}}' logger.go:42: 16:28:39 | demand-backup/2-write-data | ++ get_psql_user_host demand-backup-pguser-postgres logger.go:42: 16:28:39 | demand-backup/2-write-data | ++ local secret_name=demand-backup-pguser-postgres logger.go:42: 16:28:39 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-workable-tapir get secret/demand-backup-pguser-postgres '--template={{.data.host | base64decode }}' logger.go:42: 16:28:39 | demand-backup/2-write-data | + run_psql_local '\c myapp \\\ INSERT INTO myApp (id) VALUES (100500)' postgres:gtBdMLv8DSx8510aU1tGV4l9@demand-backup-primary.kuttl-test-workable-tapir.svc logger.go:42: 16:28:39 | demand-backup/2-write-data | + local 'command=\c myapp \\\ INSERT INTO myApp (id) VALUES (100500)' logger.go:42: 16:28:39 | demand-backup/2-write-data | + local uri=postgres:gtBdMLv8DSx8510aU1tGV4l9@demand-backup-primary.kuttl-test-workable-tapir.svc logger.go:42: 16:28:39 | demand-backup/2-write-data | + local driver=postgres logger.go:42: 16:28:39 | 
demand-backup/2-write-data | ++ get_client_pod logger.go:42: 16:28:39 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-workable-tapir get pods --selector=name=pg-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 16:28:39 | demand-backup/2-write-data | + kubectl -n kuttl-test-workable-tapir exec pg-client-6cc584874-x66rv -- bash -c 'printf '\''\c myapp \\\ INSERT INTO myApp (id) VALUES (100500)\n'\'' | psql -v ON_ERROR_STOP=1 -t -q postgres://'\''postgres:gtBdMLv8DSx8510aU1tGV4l9@demand-backup-primary.kuttl-test-workable-tapir.svc'\''' [controller-runtime] log.SetLogger(...) was never called; logs will not be displayed. Detected at: > goroutine 49 [running]: > runtime/debug.Stack() > /nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/runtime/debug/stack.go:24 +0x5e > sigs.k8s.io/controller-runtime/pkg/log.eventuallyFulfillRoot() > /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/log/log.go:60 +0xcd > sigs.k8s.io/controller-runtime/pkg/log.(*delegatingLogSink).WithName(0xc0002ebc00, {0x184a055, 0x14}) > /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/log/deleg.go:147 +0x3e > github.com/go-logr/logr.Logger.WithName({{0x1acb7d8, 0xc0002ebc00}, 0x0}, {0x184a055?, 0xc0005c1f80?}) > /home/mowsiany/go/pkg/mod/github.com/go-logr/logr@v1.2.4/logr.go:336 +0x36 > sigs.k8s.io/controller-runtime/pkg/client.newClient(0x131ead3?, {0x0, 0xc00045aa80, {0x1accd90, 0xc000435900}, 0x0, {0x0, 0x0}, 0x0}) > /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/client/client.go:122 +0xf1 > sigs.k8s.io/controller-runtime/pkg/client.New(0xc0003146c8?, {0x0, 0xc00045aa80, {0x1accd90, 0xc000435900}, 0x0, {0x0, 0x0}, 0x0}) > /home/mowsiany/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.16.3/pkg/client/client.go:103 +0x7d > github.com/kudobuilder/kuttl/pkg/test/utils.NewRetryClient(0xc0003146c8, {0x0, 0xc00045aa80, {0x1accd90, 0xc000435900}, 0x0, {0x0, 0x0}, 0x0}) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/utils/kubernetes.go:177 +0x127 > github.com/kudobuilder/kuttl/pkg/test.(*Harness).Client(0xc00037b208, 0xdd?) 
> /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/harness.go:323 +0x18e > github.com/kudobuilder/kuttl/pkg/test.(*Step).Create(0xc0005b31e0, 0xc0003869c0, {0xc000501600, 0x19}) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/step.go:177 +0x63 > github.com/kudobuilder/kuttl/pkg/test.(*Step).Run(0xc0005b31e0, 0xc0003869c0, {0xc000501600, 0x19}) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/step.go:457 +0x24a > github.com/kudobuilder/kuttl/pkg/test.(*Case).Run(0xc00012af00, 0xc0003869c0, 0xc0003a9b00) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/case.go:373 +0xaeb > github.com/kudobuilder/kuttl/pkg/test.(*Harness).RunTests.func1.1(0xc0003869c0) > /home/mowsiany/go/src/github.com/kudobuilder/kuttl/pkg/test/harness.go:401 +0x12e > testing.tRunner(0xc0003869c0, 0xc0004046a8) > /nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/testing/testing.go:1689 +0xfb > created by testing.(*T).Run in goroutine 16 > /nix/store/wkbckbd30nlhq4dxzg64q6y4vm1xx4fk-go-1.22.1/share/go/src/testing/testing.go:1742 +0x390 logger.go:42: 16:28:41 | demand-backup/2-write-data | test step completed 2-write-data logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | starting test step 3-read-from-primary logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | running command: [sh -c set -o errexit set -o xtrace source ../../functions data=$(run_psql_local '\c myapp \\\ SELECT * from myApp;' "postgres:$(get_psql_user_pass demand-backup-pguser-postgres)@$(get_psql_user_host demand-backup-pguser-postgres)") kubectl create configmap -n "${NAMESPACE}" 03-read-from-primary --from-literal=data="${data}"] logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | + source ../../functions logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ realpath ../../.. 
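The write and read steps both go through the run_psql_local helper, which pulls the connection details from the demand-backup-pguser-postgres secret and runs psql inside the pg-client pod. A simplified sketch of that pattern, with names taken from this run and a plain psql -c invocation in place of the printf pipe shown in the log:

# Sketch of the helper's approach: credentials and host come from the pguser secret,
# the statement is executed from the pg-client pod.
pass=$(kubectl -n "${NAMESPACE}" get secret demand-backup-pguser-postgres \
  --template='{{.data.password | base64decode}}')
host=$(kubectl -n "${NAMESPACE}" get secret demand-backup-pguser-postgres \
  --template='{{.data.host | base64decode}}')
pod=$(kubectl -n "${NAMESPACE}" get pods --selector=name=pg-client \
  -o jsonpath='{.items[].metadata.name}')
kubectl -n "${NAMESPACE}" exec "$pod" -- \
  psql -v ON_ERROR_STOP=1 -t -q "postgres://postgres:${pass}@${host}/myapp" \
  -c 'SELECT * FROM myApp;'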
logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | ++ CERT_MANAGER_VER=1.15.3 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | ++++ pwd logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/tests/demand-backup logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | ++ test_name=demand-backup logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/vars.sh logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export GIT_BRANCH=PR-1083 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ GIT_BRANCH=PR-1083 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export VERSION=PR-1083-44dc2e618 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ VERSION=PR-1083-44dc2e618 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export PG_VER=17 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ PG_VER=17 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:28:41 | 
demand-backup/3-read-from-primary | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export BUCKET=pg-operator-testing logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ BUCKET=pg-operator-testing logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export PGOV1_TAG=1.4.0 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ PGOV1_TAG=1.4.0 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ export PGOV1_VER=14 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ PGOV1_VER=14 logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | ++++ which gdate logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | ++++ which date logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ date=/usr/bin/date logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | ++++ which gsed logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | ++++ which sed logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ sed=/usr/bin/sed logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ command -v oc logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | ++ oc get projects logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ get_psql_user_pass demand-backup-pguser-postgres logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ local secret_name=demand-backup-pguser-postgres logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ kubectl -n kuttl-test-workable-tapir get secret/demand-backup-pguser-postgres '--template={{.data.password | base64decode}}' logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ get_psql_user_host demand-backup-pguser-postgres logger.go:42: 16:28:41 | demand-backup/3-read-from-primary | +++ local secret_name=demand-backup-pguser-postgres logger.go:42: 16:28:41 | 
demand-backup/3-read-from-primary | +++ kubectl -n kuttl-test-workable-tapir get secret/demand-backup-pguser-postgres '--template={{.data.host | base64decode }}' logger.go:42: 16:28:42 | demand-backup/3-read-from-primary | ++ run_psql_local '\c myapp \\\ SELECT * from myApp;' postgres:gtBdMLv8DSx8510aU1tGV4l9@demand-backup-primary.kuttl-test-workable-tapir.svc logger.go:42: 16:28:42 | demand-backup/3-read-from-primary | ++ local 'command=\c myapp \\\ SELECT * from myApp;' logger.go:42: 16:28:42 | demand-backup/3-read-from-primary | ++ local uri=postgres:gtBdMLv8DSx8510aU1tGV4l9@demand-backup-primary.kuttl-test-workable-tapir.svc logger.go:42: 16:28:42 | demand-backup/3-read-from-primary | ++ local driver=postgres logger.go:42: 16:28:42 | demand-backup/3-read-from-primary | +++ get_client_pod logger.go:42: 16:28:42 | demand-backup/3-read-from-primary | +++ kubectl -n kuttl-test-workable-tapir get pods --selector=name=pg-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 16:28:42 | demand-backup/3-read-from-primary | ++ kubectl -n kuttl-test-workable-tapir exec pg-client-6cc584874-x66rv -- bash -c 'printf '\''\c myapp \\\ SELECT * from myApp;\n'\'' | psql -v ON_ERROR_STOP=1 -t -q postgres://'\''postgres:gtBdMLv8DSx8510aU1tGV4l9@demand-backup-primary.kuttl-test-workable-tapir.svc'\''' logger.go:42: 16:28:44 | demand-backup/3-read-from-primary | + data=' 100500' logger.go:42: 16:28:44 | demand-backup/3-read-from-primary | + kubectl create configmap -n kuttl-test-workable-tapir 03-read-from-primary '--from-literal=data= 100500' logger.go:42: 16:28:44 | demand-backup/3-read-from-primary | configmap/03-read-from-primary created logger.go:42: 16:28:44 | demand-backup/3-read-from-primary | test step completed 3-read-from-primary logger.go:42: 16:28:44 | demand-backup/4-create-backup-s3 | starting test step 4-create-backup-s3 logger.go:42: 16:28:45 | demand-backup/4-create-backup-s3 | PerconaPGBackup:kuttl-test-workable-tapir/demand-backup-full-s3 created logger.go:42: 16:32:09 | demand-backup/4-create-backup-s3 | test step completed 4-create-backup-s3 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | starting test step 5-check-pgbackrest-info-s3 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | running command: [sh -c set -o errexit set -o xtrace source ../../functions instance=$(kubectl get -n "${NAMESPACE}" pod -l postgres-operator.crunchydata.com/instance-set=instance1 -o 'jsonpath={.items[].metadata.name}') pgbackrest_info_backups=$(kubectl exec -n "${NAMESPACE}" "$instance" -c database -- pgbackrest info --output json | jq '.[0].backup[]') check_backup() { local backup_name=$1 local pgbackrest_annotation=$2 local pgbackrest_annotation_value=$3 status_backup_name=$(kubectl get -n "${NAMESPACE}" pg-backup "$backup_name" -o jsonpath='{.status.backupName}') if [[ -z $status_backup_name ]]; then echo ".status.backupName is empty in $backup_name" exit 1 fi backup_info=$(echo "$pgbackrest_info_backups" | jq "select(.annotation.\"$pgbackrest_annotation\" == \"$pgbackrest_annotation_value\")") if [[ $status_backup_name != $(echo "$backup_info" | jq ".label" --raw-output) ]]; then echo ".status.backupName doesn't equal to label in pgbackrest info" exit 1 fi backup_job_name=$(kubectl get -n "${NAMESPACE}" pg-backup "$backup_name" -o jsonpath='{.status.jobName}') backup_job_annotation=$(echo "$backup_info" | jq '.annotation."percona.com/backup-job-name"' --raw-output) if [[ $backup_job_name != "$backup_job_annotation" ]]; then echo "Failed to get job name 
annotation from pgbackrest" exit 1 fi } manual_backup_name="demand-backup-full-s3" check_backup "$manual_backup_name" "percona.com/backup-name" "$manual_backup_name" replica_backup_name=$(kubectl get -n "${NAMESPACE}" pg-backup -o jsonpath='{.items[?(@.metadata.annotations.pgv2\.percona\.com/pgbackrest-backup-job-type=="replica-create")].metadata.name}') check_backup "$replica_backup_name" "percona.com/backup-job-type" "replica-create"] logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | + source ../../functions logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ realpath ../../.. logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | ++ CERT_MANAGER_VER=1.15.3 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | ++++ pwd logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/tests/demand-backup logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | ++ test_name=demand-backup logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/vars.sh logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export GIT_BRANCH=PR-1083 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ GIT_BRANCH=PR-1083 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export VERSION=PR-1083-44dc2e618 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ VERSION=PR-1083-44dc2e618 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export 
IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export PG_VER=17 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ PG_VER=17 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export BUCKET=pg-operator-testing logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ BUCKET=pg-operator-testing logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export PGOV1_TAG=1.4.0 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ PGOV1_TAG=1.4.0 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ export PGOV1_VER=14 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ PGOV1_VER=14 logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | ++++ which gdate logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | ++++ which date logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ date=/usr/bin/date logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | ++++ which gsed logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | which: no gsed in 
(/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | ++++ which sed logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ sed=/usr/bin/sed logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | +++ command -v oc logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | ++ oc get projects logger.go:42: 16:32:09 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-workable-tapir pod -l postgres-operator.crunchydata.com/instance-set=instance1 -o 'jsonpath={.items[].metadata.name}' logger.go:42: 16:32:10 | demand-backup/5-check-pgbackrest-info-s3 | + instance=demand-backup-instance1-g6sn-0 logger.go:42: 16:32:10 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl exec -n kuttl-test-workable-tapir demand-backup-instance1-g6sn-0 -c database -- pgbackrest info --output json logger.go:42: 16:32:10 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq '.[0].backup[]' logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | + pgbackrest_info_backups='{ logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-48kl", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000005", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.54.0" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24218150, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3169550, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3169550 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24218150 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20250314-162721F", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/50000D8", logger.go:42: 
16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/6094C38" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1741969641, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1741969708 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | } logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-lksw", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.54.0" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32247309, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4266111, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4266111 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32247309 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20250314-162850F", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 16:32:12 | 
demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1741969730, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1741969911 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | + manual_backup_name=demand-backup-full-s3 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | + check_backup demand-backup-full-s3 percona.com/backup-name demand-backup-full-s3 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | + local backup_name=demand-backup-full-s3 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | + local pgbackrest_annotation=percona.com/backup-name logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | + local pgbackrest_annotation_value=demand-backup-full-s3 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-workable-tapir pg-backup demand-backup-full-s3 -o 'jsonpath={.status.backupName}' logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | + status_backup_name=20250314-162850F logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | + [[ -z 20250314-162850F ]] logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-48kl", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000005", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.54.0" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24218150, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3169550, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3169550 logger.go:42: 16:32:12 | 
demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24218150 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20250314-162721F", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/50000D8", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/6094C38" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1741969641, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1741969708 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | } logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-lksw", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.54.0" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32247309, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4266111, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4266111 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32247309 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "label": 
"20250314-162850F", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1741969730, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1741969911 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq 'select(.annotation."percona.com/backup-name" == "demand-backup-full-s3")' logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | + backup_info='{ logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-lksw", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.54.0" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32247309, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4266111, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4266111 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32247309 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20250314-162850F", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 
16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1741969730, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1741969911 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-lksw", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.54.0" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32247309, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4266111, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4266111 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32247309 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20250314-162850F", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | 
demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1741969730, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1741969911 logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq .label --raw-output logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | + [[ 20250314-162850F != 20250314-162850F ]] logger.go:42: 16:32:12 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-workable-tapir pg-backup demand-backup-full-s3 -o 'jsonpath={.status.jobName}' logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | + backup_job_name=demand-backup-backup-lksw logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-lksw", logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.54.0" logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32247309, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4266111, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4266111 logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32247309 logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20250314-162850F", logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 16:32:13 | 
demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1741969730, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1741969911 logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq '.annotation."percona.com/backup-job-name"' --raw-output logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | + backup_job_annotation=demand-backup-backup-lksw logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | + [[ demand-backup-backup-lksw != \d\e\m\a\n\d\-\b\a\c\k\u\p\-\b\a\c\k\u\p\-\l\k\s\w ]] logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-workable-tapir pg-backup -o 'jsonpath={.items[?(@.metadata.annotations.pgv2\.percona\.com/pgbackrest-backup-job-type=="replica-create")].metadata.name}' logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | + replica_backup_name=demand-backup-backup-48kl-xhj74 logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | + check_backup demand-backup-backup-48kl-xhj74 percona.com/backup-job-type replica-create logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | + local backup_name=demand-backup-backup-48kl-xhj74 logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | + local pgbackrest_annotation=percona.com/backup-job-type logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | + local pgbackrest_annotation_value=replica-create logger.go:42: 16:32:13 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-workable-tapir pg-backup demand-backup-backup-48kl-xhj74 -o 'jsonpath={.status.backupName}' logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | + status_backup_name=20250314-162721F logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | + [[ -z 20250314-162721F ]] logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-48kl", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000005", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 
| "format": 5, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.54.0" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24218150, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3169550, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3169550 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24218150 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20250314-162721F", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/50000D8", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/6094C38" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1741969641, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1741969708 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | } logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-lksw", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.54.0" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 16:32:14 | 
demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32247309, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4266111, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4266111 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32247309 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20250314-162850F", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1741969730, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1741969911 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq 'select(.annotation."percona.com/backup-job-type" == "replica-create")' logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | + backup_info='{ logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-48kl", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000005", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.54.0" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | 
"repo-key": 1 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24218150, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3169550, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3169550 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24218150 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20250314-162721F", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/50000D8", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/6094C38" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1741969641, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1741969708 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq .label --raw-output logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-48kl", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000005", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.54.0" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, 
logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24218150, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3169550, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3169550 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24218150 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20250314-162721F", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/50000D8", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/6094C38" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1741969641, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1741969708 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | + [[ 20250314-162721F != 20250314-162721F ]] logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-workable-tapir pg-backup demand-backup-backup-48kl-xhj74 -o 'jsonpath={.status.jobName}' logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | + backup_job_name=demand-backup-backup-48kl logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-48kl", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000005", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.54.0" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 
16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24218150, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3169550, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3169550 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24218150 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20250314-162721F", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/50000D8", logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/6094C38" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1741969641, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1741969708 logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq '.annotation."percona.com/backup-job-name"' --raw-output logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | + backup_job_annotation=demand-backup-backup-48kl logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | + [[ demand-backup-backup-48kl != \d\e\m\a\n\d\-\b\a\c\k\u\p\-\b\a\c\k\u\p\-\4\8\k\l ]] logger.go:42: 16:32:14 | demand-backup/5-check-pgbackrest-info-s3 | test step completed 5-check-pgbackrest-info-s3 logger.go:42: 16:32:14 | demand-backup/6-create-backup-azure | starting test step 6-create-backup-azure logger.go:42: 16:32:14 | demand-backup/6-create-backup-azure | PerconaPGBackup:kuttl-test-workable-tapir/demand-backup-full-azure created logger.go:42: 16:33:34 | demand-backup/6-create-backup-azure | test step completed 6-create-backup-azure logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | starting test step 7-check-pgbackrest-info-azure logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | running command: [sh -c set -o errexit set -o xtrace source ../../functions instance=$(kubectl get -n "${NAMESPACE}" pod -l postgres-operator.crunchydata.com/instance-set=instance1 -o 'jsonpath={.items[].metadata.name}') pgbackrest_info_backups=$(kubectl exec -n "${NAMESPACE}" "$instance" -c database -- pgbackrest info --output json | jq '.[0].backup[]') check_backup() { local backup_name=$1 local pgbackrest_annotation=$2 local pgbackrest_annotation_value=$3 status_backup_name=$(kubectl get -n "${NAMESPACE}" pg-backup "$backup_name" -o jsonpath='{.status.backupName}') if [[ -z $status_backup_name ]]; then echo 
".status.backupName is empty in $backup_name" exit 1 fi backup_info=$(echo "$pgbackrest_info_backups" | jq "select(.annotation.\"$pgbackrest_annotation\" == \"$pgbackrest_annotation_value\")") if [[ $status_backup_name != $(echo "$backup_info" | jq ".label" --raw-output) ]]; then echo ".status.backupName doesn't equal to label in pgbackrest info" exit 1 fi backup_job_name=$(kubectl get -n "${NAMESPACE}" pg-backup "$backup_name" -o jsonpath='{.status.jobName}') backup_job_annotation=$(echo "$backup_info" | jq '.annotation."percona.com/backup-job-name"' --raw-output) if [[ $backup_job_name != "$backup_job_annotation" ]]; then echo "Failed to get job name annotation from pgbackrest" exit 1 fi } manual_backup_name="demand-backup-full-azure" check_backup "$manual_backup_name" "percona.com/backup-name" "$manual_backup_name" replica_backup_name=$(kubectl get -n "${NAMESPACE}" pg-backup -o jsonpath='{.items[?(@.metadata.annotations.pgv2\.percona\.com/pgbackrest-backup-job-type=="replica-create")].metadata.name}') check_backup "$replica_backup_name" "percona.com/backup-job-type" "replica-create"] logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | + source ../../functions logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ realpath ../../.. logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | ++ CERT_MANAGER_VER=1.15.3 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | ++++ pwd logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/tests/demand-backup logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | ++ test_name=demand-backup logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/vars.sh logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 
16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export GIT_BRANCH=PR-1083 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ GIT_BRANCH=PR-1083 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export VERSION=PR-1083-44dc2e618 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ VERSION=PR-1083-44dc2e618 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export PG_VER=17 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ PG_VER=17 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export BUCKET=pg-operator-testing logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ BUCKET=pg-operator-testing logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export PGOV1_TAG=1.4.0 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ PGOV1_TAG=1.4.0 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ export PGOV1_VER=14 logger.go:42: 
16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ PGOV1_VER=14 logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | ++++ which gdate logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | ++++ which date logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ date=/usr/bin/date logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | ++++ which gsed logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | ++++ which sed logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ sed=/usr/bin/sed logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | +++ command -v oc logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | ++ oc get projects logger.go:42: 16:33:34 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-workable-tapir pod -l postgres-operator.crunchydata.com/instance-set=instance1 -o 'jsonpath={.items[].metadata.name}' logger.go:42: 16:33:35 | demand-backup/7-check-pgbackrest-info-azure | + instance=demand-backup-instance1-g6sn-0 logger.go:42: 16:33:35 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl exec -n kuttl-test-workable-tapir demand-backup-instance1-g6sn-0 -c database -- pgbackrest info --output json logger.go:42: 16:33:35 | demand-backup/7-check-pgbackrest-info-azure | ++ jq '.[0].backup[]' logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | + pgbackrest_info_backups='{ logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-48kl", logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000005", logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.54.0" logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 16:33:37 | 
demand-backup/7-check-pgbackrest-info-azure | "delta": 24218150, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3169550, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "size": 3169550 logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "size": 24218150 logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "label": "20250314-162721F", logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/50000D8", logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/6094C38" logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "start": 1741969641, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1741969708 logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-lksw", logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000008", logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000A" logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.54.0" logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32247309, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 
16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4266111, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "size": 4266111 logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "size": 32247309 logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "label": "20250314-162850F", logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/8000028", logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/A000050" logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "start": 1741969730, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1741969911 logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-7mnx", logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.54.0" logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32247774, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4262918, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "size": 4262918 
logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "size": 32247774 logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "label": "20250314-163221F", logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000158" logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "start": 1741969941, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1741969992 logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | + manual_backup_name=demand-backup-full-azure logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | + check_backup demand-backup-full-azure percona.com/backup-name demand-backup-full-azure logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | + local backup_name=demand-backup-full-azure logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | + local pgbackrest_annotation=percona.com/backup-name logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | + local pgbackrest_annotation_value=demand-backup-full-azure logger.go:42: 16:33:37 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-workable-tapir pg-backup demand-backup-full-azure -o 'jsonpath={.status.backupName}' logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | + status_backup_name=20250314-163221F logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | + [[ -z 20250314-163221F ]] logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | ++ jq 'select(.annotation."percona.com/backup-name" == "demand-backup-full-azure")' logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-48kl", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000005", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 16:33:38 | 
demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.54.0" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24218150, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3169550, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "size": 3169550 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "size": 24218150 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "label": "20250314-162721F", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/50000D8", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/6094C38" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": 1741969641, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1741969708 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-lksw", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000008", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000A" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.54.0" logger.go:42: 
16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32247309, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4266111, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "size": 4266111 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "size": 32247309 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "label": "20250314-162850F", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/8000028", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/A000050" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": 1741969730, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1741969911 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-7mnx", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.54.0" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 
16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32247774, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4262918, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "size": 4262918 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "size": 32247774 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "label": "20250314-163221F", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000158" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": 1741969941, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1741969992 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | + backup_info='{ logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-7mnx", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.54.0" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | 
"repo-key": 3 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32247774, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4262918, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "size": 4262918 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "size": 32247774 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "label": "20250314-163221F", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000158" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": 1741969941, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1741969992 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-7mnx", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.54.0" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | 
demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32247774, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4262918, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "size": 4262918 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "size": 32247774 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "label": "20250314-163221F", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000158" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": 1741969941, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1741969992 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | ++ jq .label --raw-output logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | + [[ 20250314-163221F != 20250314-163221F ]] logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-workable-tapir pg-backup demand-backup-full-azure -o 'jsonpath={.status.jobName}' logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | + backup_job_name=demand-backup-backup-7mnx logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-7mnx", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.54.0" logger.go:42: 16:33:38 | 
demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32247774, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4262918, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "size": 4262918 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "size": 32247774 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "label": "20250314-163221F", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000158" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "start": 1741969941, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1741969992 logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | ++ jq '.annotation."percona.com/backup-job-name"' --raw-output logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | + backup_job_annotation=demand-backup-backup-7mnx logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | + [[ demand-backup-backup-7mnx != \d\e\m\a\n\d\-\b\a\c\k\u\p\-\b\a\c\k\u\p\-\7\m\n\x ]] logger.go:42: 16:33:38 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-workable-tapir pg-backup -o 'jsonpath={.items[?(@.metadata.annotations.pgv2\.percona\.com/pgbackrest-backup-job-type=="replica-create")].metadata.name}' logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | + replica_backup_name=demand-backup-backup-48kl-xhj74 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | + check_backup demand-backup-backup-48kl-xhj74 percona.com/backup-job-type replica-create logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | + local backup_name=demand-backup-backup-48kl-xhj74 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | + local pgbackrest_annotation=percona.com/backup-job-type logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | + local 
pgbackrest_annotation_value=replica-create logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-workable-tapir pg-backup demand-backup-backup-48kl-xhj74 -o 'jsonpath={.status.backupName}' logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | + status_backup_name=20250314-162721F logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | + [[ -z 20250314-162721F ]] logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | ++ jq 'select(.annotation."percona.com/backup-job-type" == "replica-create")' logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-48kl", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000005", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.54.0" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24218150, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3169550, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "size": 3169550 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "size": 24218150 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "label": "20250314-162721F", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/50000D8", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/6094C38" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure 
| "timestamp": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": 1741969641, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1741969708 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-lksw", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000008", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000A" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.54.0" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32247309, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4266111, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "size": 4266111 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "size": 32247309 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "label": "20250314-162850F", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/8000028", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/A000050" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": 1741969730, logger.go:42: 16:33:39 | 
demand-backup/7-check-pgbackrest-info-azure | "stop": 1741969911 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-7mnx", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.54.0" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32247774, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4262918, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "size": 4262918 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "size": 32247774 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "label": "20250314-163221F", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000158" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": 1741969941, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1741969992 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 
| demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | + backup_info='{ logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-48kl", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000005", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.54.0" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24218150, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3169550, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "size": 3169550 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "size": 24218150 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "label": "20250314-162721F", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/50000D8", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/6094C38" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": 1741969641, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1741969708 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 
16:33:39 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-48kl", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000005", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.54.0" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24218150, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3169550, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "size": 3169550 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "size": 24218150 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "label": "20250314-162721F", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/50000D8", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/6094C38" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": 1741969641, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1741969708 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | ++ jq .label --raw-output logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | + [[ 
20250314-162721F != 20250314-162721F ]] logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-workable-tapir pg-backup demand-backup-backup-48kl-xhj74 -o 'jsonpath={.status.jobName}' logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | + backup_job_name=demand-backup-backup-48kl logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-48kl", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000005", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.54.0" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24218150, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3169550, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "size": 3169550 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "size": 24218150 logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "label": "20250314-162721F", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/50000D8", logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/6094C38" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "start": 1741969641, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1741969708 logger.go:42: 16:33:39 | 
demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | ++ jq '.annotation."percona.com/backup-job-name"' --raw-output logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | + backup_job_annotation=demand-backup-backup-48kl logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | + [[ demand-backup-backup-48kl != \d\e\m\a\n\d\-\b\a\c\k\u\p\-\b\a\c\k\u\p\-\4\8\k\l ]] logger.go:42: 16:33:39 | demand-backup/7-check-pgbackrest-info-azure | test step completed 7-check-pgbackrest-info-azure logger.go:42: 16:33:39 | demand-backup/8-create-second-backup-s3 | starting test step 8-create-second-backup-s3 logger.go:42: 16:33:40 | demand-backup/8-create-second-backup-s3 | PerconaPGBackup:kuttl-test-workable-tapir/demand-backup-full-s3-2 created logger.go:42: 16:35:26 | demand-backup/8-create-second-backup-s3 | test step completed 8-create-second-backup-s3 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | starting test step 9-check-retention-s3 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | running command: [sh -c set -o errexit set -o xtrace source ../../functions retention_count=2 backups_count=$(kubectl -n "${NAMESPACE}" get pg-backup -o yaml | yq '.items | map(select(.metadata.name != "demand-backup-full-azure*")) | length') jobs_count=$(kubectl -n "${NAMESPACE}" get jobs -o yaml | yq '.items | map(select(.metadata.ownerReferences.[].name != "demand-backup-full-azure*")) | length') if [[ $backups_count != $retention_count ]]; then echo "There are $backups_count backups, but our retention is set to $retention_count" exit 1 fi if [[ $jobs_count != $retention_count ]]; then echo "There are $jobs_count jobs, but our retention is set to $retention_count" exit 1 fi] logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | + source ../../functions logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ realpath ../../.. 
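The check_backup routine traced repeatedly in step 7 above boils down to two comparisons per backup: the pgbackrest label recorded in the pg-backup custom resource status must match the label of the entry selected by annotation from the pgbackrest info output, and the job name in the status must match the percona.com/backup-job-name annotation written into that entry. A minimal standalone sketch of that logic follows; it assumes NAMESPACE is set and that a backups variable already holds the per-backup JSON objects echoed above, so the names here are illustrative and not the test suite's actual functions file.

    # Sketch of the step-7 verification logic (illustrative; mirrors the xtrace above).
    check_backup() {
        local backup_name=$1                 # pg-backup resource name, e.g. demand-backup-full-azure
        local pgbackrest_annotation=$2       # e.g. percona.com/backup-name
        local pgbackrest_annotation_value=$3 # e.g. demand-backup-full-azure

        # 1. The CR status must reference a pgbackrest backup set...
        local status_backup_name
        status_backup_name=$(kubectl get -n "${NAMESPACE}" pg-backup "${backup_name}" -o 'jsonpath={.status.backupName}')
        [[ -z ${status_backup_name} ]] && return 1

        # ...and its label must match the pgbackrest entry selected by the given annotation.
        local backup_info
        backup_info=$(echo "${backups}" | jq "select(.annotation.\"${pgbackrest_annotation}\" == \"${pgbackrest_annotation_value}\")")
        [[ ${status_backup_name} != "$(echo "${backup_info}" | jq .label --raw-output)" ]] && return 1

        # 2. The job name in the CR status must match the backup-job-name annotation of that entry.
        local backup_job_name
        backup_job_name=$(kubectl get -n "${NAMESPACE}" pg-backup "${backup_name}" -o 'jsonpath={.status.jobName}')
        [[ ${backup_job_name} != "$(echo "${backup_info}" | jq '.annotation."percona.com/backup-job-name"' --raw-output)" ]] && return 1
    }

The same routine is invoked once for the on-demand azure backup (selected by percona.com/backup-name) and once for the replica-create backup (selected by percona.com/backup-job-type), which is exactly the pair of traces that follows.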
logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | ++ CERT_MANAGER_VER=1.15.3 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | ++++ pwd logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/tests/demand-backup logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | ++ test_name=demand-backup logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/vars.sh logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export GIT_BRANCH=PR-1083 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ GIT_BRANCH=PR-1083 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export VERSION=PR-1083-44dc2e618 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ VERSION=PR-1083-44dc2e618 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export PG_VER=17 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ PG_VER=17 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export 
IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export BUCKET=pg-operator-testing logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ BUCKET=pg-operator-testing logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export PGOV1_TAG=1.4.0 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ PGOV1_TAG=1.4.0 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ export PGOV1_VER=14 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ PGOV1_VER=14 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | ++++ which gdate logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | ++++ which date logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ date=/usr/bin/date logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | ++++ which gsed logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | ++++ which sed logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ sed=/usr/bin/sed logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | +++ command -v oc logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | ++ oc get projects logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | + retention_count=2 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | ++ kubectl -n kuttl-test-workable-tapir get pg-backup -o yaml logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | ++ yq '.items | map(select(.metadata.name != "demand-backup-full-azure*")) | length' logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | + backups_count=2 logger.go:42: 16:35:26 | demand-backup/9-check-retention-s3 | ++ kubectl -n kuttl-test-workable-tapir get jobs -o yaml logger.go:42: 
16:35:26 | demand-backup/9-check-retention-s3 | ++ yq '.items | map(select(.metadata.ownerReferences.[].name != "demand-backup-full-azure*")) | length' logger.go:42: 16:35:27 | demand-backup/9-check-retention-s3 | + jobs_count=2 logger.go:42: 16:35:27 | demand-backup/9-check-retention-s3 | + [[ 2 != 2 ]] logger.go:42: 16:35:27 | demand-backup/9-check-retention-s3 | + [[ 2 != 2 ]] logger.go:42: 16:35:27 | demand-backup/9-check-retention-s3 | test step completed 9-check-retention-s3 logger.go:42: 16:35:27 | demand-backup/10-create-second-backup-azure | starting test step 10-create-second-backup-azure logger.go:42: 16:35:27 | demand-backup/10-create-second-backup-azure | PerconaPGBackup:kuttl-test-workable-tapir/demand-backup-full-azure-2 created logger.go:42: 16:36:52 | demand-backup/10-create-second-backup-azure | test step completed 10-create-second-backup-azure logger.go:42: 16:36:52 | demand-backup/11-create-third-backup-azure | starting test step 11-create-third-backup-azure logger.go:42: 16:36:53 | demand-backup/11-create-third-backup-azure | PerconaPGBackup:kuttl-test-workable-tapir/demand-backup-full-azure-3 created logger.go:42: 16:38:59 | demand-backup/11-create-third-backup-azure | test step completed 11-create-third-backup-azure logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | starting test step 12-check-retention-azure logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | running command: [sh -c set -o errexit set -o xtrace source ../../functions retention_count=2 backups_count=$(kubectl -n "${NAMESPACE}" get pg-backup -o yaml | yq '.items | map(select(.metadata.name == "demand-backup-full-azure*")) | length') jobs_count=$(kubectl -n "${NAMESPACE}" get jobs -o yaml | yq '.items | map(select(.metadata.ownerReferences.[].name == "demand-backup-full-azure*")) | length') if [[ $backups_count != $retention_count ]]; then echo "There are $backups_count backups, but our retention is set to $retention_count" exit 1 fi if [[ $jobs_count != $retention_count ]]; then echo "There are $jobs_count jobs, but our retention is set to $retention_count" exit 1 fi] logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | + source ../../functions logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ realpath ../../.. 
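For readers skimming the trace: the retention checks in steps 9 and 12 reduce to the short script below. This is a sketch assembled from the command listing shown above, not the literal e2e-tests helper; NAMESPACE and the backup-name prefix are values taken from this particular run, and it assumes kubectl plus yq v4 on the PATH, the same tools the trace invokes. Step 9 uses the inverse filter (!= on the azure prefix) to count the non-azure backups, while step 12 uses == to count only the azure ones.

#!/usr/bin/env bash
# Sketch of the retention check from steps 9/12 (values from this run).
set -o errexit

NAMESPACE=${NAMESPACE:-kuttl-test-workable-tapir}
PREFIX=${PREFIX:-demand-backup-full-azure}
retention_count=2

# Count PerconaPGBackup objects whose name matches the prefix
# (the '*' is intended as a wildcard, as in the trace above).
backups_count=$(kubectl -n "${NAMESPACE}" get pg-backup -o yaml \
  | yq ".items | map(select(.metadata.name == \"${PREFIX}*\")) | length")

# Count backup Jobs by matching their ownerReferences against the same prefix.
jobs_count=$(kubectl -n "${NAMESPACE}" get jobs -o yaml \
  | yq ".items | map(select(.metadata.ownerReferences.[].name == \"${PREFIX}*\")) | length")

if [[ ${backups_count} != "${retention_count}" ]]; then
  echo "There are ${backups_count} backups, but our retention is set to ${retention_count}"
  exit 1
fi
if [[ ${jobs_count} != "${retention_count}" ]]; then
  echo "There are ${jobs_count} jobs, but our retention is set to ${retention_count}"
  exit 1
fi
echo "retention OK: ${backups_count} backups, ${jobs_count} jobs"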
logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | ++ CERT_MANAGER_VER=1.15.3 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | ++++ pwd logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/tests/demand-backup logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | ++ test_name=demand-backup logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/vars.sh logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export GIT_BRANCH=PR-1083 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ GIT_BRANCH=PR-1083 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export VERSION=PR-1083-44dc2e618 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ VERSION=PR-1083-44dc2e618 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export PG_VER=17 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ PG_VER=17 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:38:59 | 
demand-backup/12-check-retention-azure | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export BUCKET=pg-operator-testing logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ BUCKET=pg-operator-testing logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export PGOV1_TAG=1.4.0 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ PGOV1_TAG=1.4.0 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ export PGOV1_VER=14 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ PGOV1_VER=14 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | ++++ which gdate logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | ++++ which date logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ date=/usr/bin/date logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | ++++ which gsed logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | ++++ which sed logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ sed=/usr/bin/sed logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | +++ command -v oc logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | ++ oc get projects logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | + retention_count=2 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | ++ kubectl -n kuttl-test-workable-tapir get pg-backup -o yaml logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | ++ yq '.items | map(select(.metadata.name == "demand-backup-full-azure*")) | length' logger.go:42: 16:38:59 | 
demand-backup/12-check-retention-azure | + backups_count=2 logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | ++ kubectl -n kuttl-test-workable-tapir get jobs -o yaml logger.go:42: 16:38:59 | demand-backup/12-check-retention-azure | ++ yq '.items | map(select(.metadata.ownerReferences.[].name == "demand-backup-full-azure*")) | length' logger.go:42: 16:39:00 | demand-backup/12-check-retention-azure | + jobs_count=2 logger.go:42: 16:39:00 | demand-backup/12-check-retention-azure | + [[ 2 != 2 ]] logger.go:42: 16:39:00 | demand-backup/12-check-retention-azure | + [[ 2 != 2 ]] logger.go:42: 16:39:00 | demand-backup/12-check-retention-azure | test step completed 12-check-retention-azure logger.go:42: 16:39:00 | demand-backup/13-delete-backup | starting test step 13-delete-backup logger.go:42: 16:39:00 | demand-backup/13-delete-backup | running command: [sh -c set -o errexit set -o xtrace source ../../functions kubectl delete pg-backup -n "${NAMESPACE}" demand-backup-full-s3 kubectl delete pg-backup -n "${NAMESPACE}" demand-backup-full-azure-2] logger.go:42: 16:39:00 | demand-backup/13-delete-backup | + source ../../functions logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ realpath ../../.. logger.go:42: 16:39:00 | demand-backup/13-delete-backup | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | ++ CERT_MANAGER_VER=1.15.3 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | ++++ pwd logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/tests/demand-backup logger.go:42: 16:39:00 | demand-backup/13-delete-backup | ++ test_name=demand-backup logger.go:42: 16:39:00 | demand-backup/13-delete-backup | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/vars.sh logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:39:00 | demand-backup/13-delete-backup | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export GIT_BRANCH=PR-1083 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ GIT_BRANCH=PR-1083 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export VERSION=PR-1083-44dc2e618 logger.go:42: 16:39:00 | 
demand-backup/13-delete-backup | +++ VERSION=PR-1083-44dc2e618 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export PG_VER=17 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ PG_VER=17 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export BUCKET=pg-operator-testing logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ BUCKET=pg-operator-testing logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export PGOV1_TAG=1.4.0 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ PGOV1_TAG=1.4.0 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ export PGOV1_VER=14 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ PGOV1_VER=14 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | ++++ which gdate logger.go:42: 16:39:00 | demand-backup/13-delete-backup | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:39:00 | demand-backup/13-delete-backup | ++++ which date logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ date=/usr/bin/date logger.go:42: 16:39:00 | demand-backup/13-delete-backup | ++++ which gsed logger.go:42: 16:39:00 | demand-backup/13-delete-backup | which: no gsed in 
(/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:39:00 | demand-backup/13-delete-backup | ++++ which sed logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ sed=/usr/bin/sed logger.go:42: 16:39:00 | demand-backup/13-delete-backup | +++ command -v oc logger.go:42: 16:39:00 | demand-backup/13-delete-backup | ++ oc get projects logger.go:42: 16:39:00 | demand-backup/13-delete-backup | + kubectl delete pg-backup -n kuttl-test-workable-tapir demand-backup-full-s3 logger.go:42: 16:39:00 | demand-backup/13-delete-backup | perconapgbackup.pgv2.percona.com "demand-backup-full-s3" deleted logger.go:42: 16:39:00 | demand-backup/13-delete-backup | + kubectl delete pg-backup -n kuttl-test-workable-tapir demand-backup-full-azure-2 logger.go:42: 16:39:01 | demand-backup/13-delete-backup | perconapgbackup.pgv2.percona.com "demand-backup-full-azure-2" deleted logger.go:42: 16:39:01 | demand-backup/13-delete-backup | test step completed 13-delete-backup logger.go:42: 16:39:01 | demand-backup/14-recreate-backup-s3 | starting test step 14-recreate-backup-s3 logger.go:42: 16:39:01 | demand-backup/14-recreate-backup-s3 | PerconaPGBackup:kuttl-test-workable-tapir/demand-backup-full-s3 created logger.go:42: 16:40:52 | demand-backup/14-recreate-backup-s3 | test step completed 14-recreate-backup-s3 logger.go:42: 16:40:52 | demand-backup/15-recreate-backup-azure | starting test step 15-recreate-backup-azure logger.go:42: 16:40:52 | demand-backup/15-recreate-backup-azure | PerconaPGBackup:kuttl-test-workable-tapir/demand-backup-full-azure created logger.go:42: 16:42:48 | demand-backup/15-recreate-backup-azure | test step completed 15-recreate-backup-azure logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | starting test step 16-check-password-leak logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | running command: [sh -c set -o errexit set -o xtrace source ../../functions check_passwords_leak] logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | + source ../../functions logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ realpath ../../.. 
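Steps 13 through 15 exercise deleting two on-demand backups and then re-creating them. In shell terms the sequence is roughly the sketch below; the delete commands are copied from the step-13 listing, while the manifest file names in the apply commands are placeholders for the kuttl step assets, which the harness applies itself and which are not shown in this log.

# Sketch of the delete/re-create flow from steps 13-15 (names from this run).
NAMESPACE=kuttl-test-workable-tapir

# step 13: drop an S3 and an Azure PerconaPGBackup object
kubectl delete pg-backup -n "${NAMESPACE}" demand-backup-full-s3
kubectl delete pg-backup -n "${NAMESPACE}" demand-backup-full-azure-2

# steps 14-15: re-create the backups; kuttl applies the step manifests itself,
# so these file names are illustrative only
kubectl apply -n "${NAMESPACE}" -f 14-recreate-backup-s3.yaml
kubectl apply -n "${NAMESPACE}" -f 15-recreate-backup-azure.yaml

# observe the new backups progressing to completion
kubectl get pg-backup -n "${NAMESPACE}" -w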
logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | ++ CERT_MANAGER_VER=1.15.3 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | ++++ pwd logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/tests/demand-backup logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | ++ test_name=demand-backup logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/vars.sh logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export GIT_BRANCH=PR-1083 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ GIT_BRANCH=PR-1083 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export VERSION=PR-1083-44dc2e618 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ VERSION=PR-1083-44dc2e618 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export PG_VER=17 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ PG_VER=17 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export 
IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export BUCKET=pg-operator-testing logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ BUCKET=pg-operator-testing logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export PGOV1_TAG=1.4.0 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ PGOV1_TAG=1.4.0 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ export PGOV1_VER=14 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ PGOV1_VER=14 logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | ++++ which gdate logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | ++++ which date logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ date=/usr/bin/date logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | ++++ which gsed logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | ++++ which sed logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ sed=/usr/bin/sed logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | +++ command -v oc logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | ++ oc get projects logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | + check_passwords_leak logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | + local secrets logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | + local passwords logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | + local pods logger.go:42: 16:42:48 | demand-backup/16-check-password-leak | ++ kubectl -n kuttl-test-workable-tapir get secrets -o json logger.go:42: 16:42:48 | demand-backup/16-check-password-leak 
| ++ jq -r '.items[] | select(.data."password"? != null) | .data."password"' logger.go:42: 16:42:49 | demand-backup/16-check-password-leak | ++ jq -r '.items[] | select(.data."pgbouncer-password"? != null) | .data."pgbouncer-password"' logger.go:42: 16:42:49 | demand-backup/16-check-password-leak | ++ kubectl -n kuttl-test-workable-tapir get secrets -o json logger.go:42: 16:42:50 | demand-backup/16-check-password-leak | + secrets='a1g0dmRPZE41WURGbUx3YzZFcUlUY0N4 logger.go:42: 16:42:50 | demand-backup/16-check-password-leak | Z3RCZE1MdjhEU3g4NTEwYVUxdEdWNGw5 logger.go:42: 16:42:50 | demand-backup/16-check-password-leak | UHtTfE8sVUNSQCkrRVl3W19venczX0s7bnxzPGhAckY=' logger.go:42: 16:42:50 | demand-backup/16-check-password-leak | ++ for i in '$secrets' logger.go:42: 16:42:50 | demand-backup/16-check-password-leak | ++ base64 -d logger.go:42: 16:42:50 | demand-backup/16-check-password-leak | ++ echo logger.go:42: 16:42:50 | demand-backup/16-check-password-leak | ++ for i in '$secrets' logger.go:42: 16:42:50 | demand-backup/16-check-password-leak | ++ base64 -d logger.go:42: 16:42:50 | demand-backup/16-check-password-leak | ++ echo logger.go:42: 16:42:50 | demand-backup/16-check-password-leak | ++ for i in '$secrets' logger.go:42: 16:42:50 | demand-backup/16-check-password-leak | ++ base64 -d logger.go:42: 16:42:50 | demand-backup/16-check-password-leak | ++ echo logger.go:42: 16:42:50 | demand-backup/16-check-password-leak | + passwords='kX4vdOdN5YDFmLwc6EqITcCx logger.go:42: 16:42:50 | demand-backup/16-check-password-leak | gtBdMLv8DSx8510aU1tGV4l9 logger.go:42: 16:42:50 | demand-backup/16-check-password-leak | P{S|O,UCR@)+EYw[_ozw3_K;n|s&1 >/dev/null) if [[ $res == *$(echo "No resources found in ${NAMESPACE} namespace.")* ]]; then data=0 fi kubectl create configmap -n "${NAMESPACE}" 25-pg-backup-objects --from-literal=data="${data}"] logger.go:42: 16:47:37 | demand-backup/25-delete-cluster-with-finalizer | + kubectl delete pg -n kuttl-test-workable-tapir demand-backup logger.go:42: 16:47:37 | demand-backup/25-delete-cluster-with-finalizer | perconapgcluster.pgv2.percona.com "demand-backup" deleted logger.go:42: 16:49:23 | demand-backup/25-delete-cluster-with-finalizer | + sleep 15 logger.go:42: 16:49:38 | demand-backup/25-delete-cluster-with-finalizer | + data=1 logger.go:42: 16:49:38 | demand-backup/25-delete-cluster-with-finalizer | ++ kubectl -n kuttl-test-workable-tapir get pg-backup logger.go:42: 16:49:38 | demand-backup/25-delete-cluster-with-finalizer | + res='No resources found in kuttl-test-workable-tapir namespace.' logger.go:42: 16:49:38 | demand-backup/25-delete-cluster-with-finalizer | ++ echo 'No resources found in kuttl-test-workable-tapir namespace.' logger.go:42: 16:49:38 | demand-backup/25-delete-cluster-with-finalizer | + [[ No resources found in kuttl-test-workable-tapir namespace. 
== *No resources found in kuttl-test-workable-tapir namespace.* ]] logger.go:42: 16:49:38 | demand-backup/25-delete-cluster-with-finalizer | + data=0 logger.go:42: 16:49:38 | demand-backup/25-delete-cluster-with-finalizer | + kubectl create configmap -n kuttl-test-workable-tapir 25-pg-backup-objects --from-literal=data=0 logger.go:42: 16:49:38 | demand-backup/25-delete-cluster-with-finalizer | configmap/25-pg-backup-objects created logger.go:42: 16:49:39 | demand-backup/25-delete-cluster-with-finalizer | test step completed 25-delete-cluster-with-finalizer logger.go:42: 16:49:39 | demand-backup/99-remove-cluster-gracefully | starting test step 99-remove-cluster-gracefully logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | running command: [sh -c set -o errexit set -o xtrace source ../../functions remove_all_finalizers destroy_operator] logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | + source ../../functions logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ realpath ../../.. logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | ++ CERT_MANAGER_VER=1.15.3 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | ++++ pwd logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/tests/demand-backup logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | ++ test_name=demand-backup logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/vars.sh logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/deploy logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/e2e-tests/conf logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export GIT_BRANCH=PR-1083 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ GIT_BRANCH=PR-1083 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export 
VERSION=PR-1083-44dc2e618 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ VERSION=PR-1083-44dc2e618 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1083-44dc2e618 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export PG_VER=17 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ PG_VER=17 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-ppg17-pgbouncer logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg17-postgres logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-ppg17-pgbackrest logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export BUCKET=pg-operator-testing logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ BUCKET=pg-operator-testing logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export PGOV1_TAG=1.4.0 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ PGOV1_TAG=1.4.0 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ export PGOV1_VER=14 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ PGOV1_VER=14 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | ++++ which gdate logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | which: no gdate in 
(/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | ++++ which date logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ date=/usr/bin/date logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | ++++ which gsed logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1083/bin/:/home/ec2-user/.krew/bin:/usr/local/bin:/usr/bin) logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | ++++ which sed logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ sed=/usr/bin/sed logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | +++ command -v oc logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | ++ oc get projects logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | + remove_all_finalizers logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | + resource_types=("pg-restore" "pg-backup" "pg") logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | + for resource in '"${resource_types[@]}"' logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | + echo 'removing all finalizers for pg-restore resources' logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | removing all finalizers for pg-restore resources logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-workable-tapir get pg-restore -o json logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | + jq '.items[] | .metadata.name' -r logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | + IFS= logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | + read -r name logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-workable-tapir delete pg-restore demand-backup-restore --wait=0 logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | perconapgrestore.pgv2.percona.com "demand-backup-restore" deleted logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | ++ kubectl -n kuttl-test-workable-tapir get pg-restore demand-backup-restore -o yaml logger.go:42: 16:49:40 | demand-backup/99-remove-cluster-gracefully | ++ yq '.metadata.finalizers | length' logger.go:42: 16:49:41 | demand-backup/99-remove-cluster-gracefully | Error from server (NotFound): perconapgrestores.pgv2.percona.com "demand-backup-restore" not found logger.go:42: 16:49:41 | demand-backup/99-remove-cluster-gracefully | + [[ 0 == \0 ]] logger.go:42: 16:49:41 | demand-backup/99-remove-cluster-gracefully | + continue logger.go:42: 16:49:41 | demand-backup/99-remove-cluster-gracefully | + IFS= logger.go:42: 16:49:41 | demand-backup/99-remove-cluster-gracefully | + read -r name logger.go:42: 16:49:41 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-workable-tapir delete pg-restore demand-backup-restore-azure --wait=0 logger.go:42: 16:49:41 | demand-backup/99-remove-cluster-gracefully | perconapgrestore.pgv2.percona.com "demand-backup-restore-azure" deleted logger.go:42: 16:49:41 | demand-backup/99-remove-cluster-gracefully | ++ kubectl -n kuttl-test-workable-tapir get pg-restore demand-backup-restore-azure -o yaml logger.go:42: 16:49:41 | demand-backup/99-remove-cluster-gracefully | ++ yq '.metadata.finalizers | length' logger.go:42: 16:49:42 | 
demand-backup/99-remove-cluster-gracefully | Error from server (NotFound): perconapgrestores.pgv2.percona.com "demand-backup-restore-azure" not found logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + [[ 0 == \0 ]] logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + continue logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + IFS= logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + read -r name logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + for resource in '"${resource_types[@]}"' logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + echo 'removing all finalizers for pg-backup resources' logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | removing all finalizers for pg-backup resources logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-workable-tapir get pg-backup -o json logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + jq '.items[] | .metadata.name' -r logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + IFS= logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + read -r name logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + for resource in '"${resource_types[@]}"' logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + echo 'removing all finalizers for pg resources' logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | removing all finalizers for pg resources logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-workable-tapir get pg -o json logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + jq '.items[] | .metadata.name' -r logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + IFS= logger.go:42: 16:49:42 | demand-backup/99-remove-cluster-gracefully | + read -r name logger.go:42: 16:49:43 | demand-backup/99-remove-cluster-gracefully | + destroy_operator logger.go:42: 16:49:43 | demand-backup/99-remove-cluster-gracefully | + kubectl -n pg-operator delete deployment percona-postgresql-operator --force --grace-period=0 logger.go:42: 16:49:43 | demand-backup/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 16:49:43 | demand-backup/99-remove-cluster-gracefully | deployment.apps "percona-postgresql-operator" force deleted logger.go:42: 16:49:43 | demand-backup/99-remove-cluster-gracefully | + [[ -n pg-operator ]] logger.go:42: 16:49:43 | demand-backup/99-remove-cluster-gracefully | + kubectl delete namespace pg-operator --force --grace-period=0 logger.go:42: 16:49:43 | demand-backup/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. 
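The 99-remove-cluster-gracefully step drives two helpers, remove_all_finalizers and destroy_operator. Reconstructed from the xtrace above, they amount to roughly the sketch below. Only the "no finalizers left" branch is exercised in this run, so the patch fallback is an assumption about what the helper would do, not something visible in this log.

# Sketch of the step-99 teardown, pieced together from the trace (names from this run).
NAMESPACE=kuttl-test-workable-tapir
OPERATOR_NS=pg-operator

remove_all_finalizers() {
  local resource_types=("pg-restore" "pg-backup" "pg")
  for resource in "${resource_types[@]}"; do
    echo "removing all finalizers for ${resource} resources"
    kubectl -n "${NAMESPACE}" get "${resource}" -o json \
      | jq -r '.items[] | .metadata.name' \
      | while IFS= read -r name; do
          kubectl -n "${NAMESPACE}" delete "${resource}" "${name}" --wait=0
          # If the object is still present and carries finalizers, clear them so
          # deletion can finish. This branch is not hit in the run above, where
          # the finalizer count is already 0; the patch here is an assumption.
          finalizers=$(kubectl -n "${NAMESPACE}" get "${resource}" "${name}" \
            -o yaml 2>/dev/null | yq '.metadata.finalizers | length')
          if [[ -n "${finalizers}" && "${finalizers}" != "0" ]]; then
            kubectl -n "${NAMESPACE}" patch "${resource}" "${name}" --type=merge \
              -p '{"metadata":{"finalizers":[]}}'
          fi
        done
  done
}

destroy_operator() {
  # force-delete the operator deployment and its namespace, as in the trace
  kubectl -n "${OPERATOR_NS}" delete deployment percona-postgresql-operator \
    --force --grace-period=0
  kubectl delete namespace "${OPERATOR_NS}" --force --grace-period=0
}

remove_all_finalizers
destroy_operator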
logger.go:42: 16:49:43 | demand-backup/99-remove-cluster-gracefully | namespace "pg-operator" force deleted logger.go:42: 16:49:49 | demand-backup/99-remove-cluster-gracefully | test step completed 99-remove-cluster-gracefully logger.go:42: 16:49:49 | demand-backup | demand-backup events from ns kuttl-test-workable-tapir: logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:11 +0000 UTC Normal Pod pg-client-6cc584874-x66rv Binding Scheduled Successfully assigned kuttl-test-workable-tapir/pg-client-6cc584874-x66rv to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-h2l6 default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:11 +0000 UTC Normal ReplicaSet.apps pg-client-6cc584874 SuccessfulCreate Created pod: pg-client-6cc584874-x66rv replicaset-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:11 +0000 UTC Normal Deployment.apps pg-client ScalingReplicaSet Scaled up replica set pg-client-6cc584874 to 1 deployment-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:12 +0000 UTC Normal Pod pg-client-6cc584874-x66rv.spec.containers{pg-client} Pulling Pulling image "perconalab/percona-distribution-postgresql:15" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:19 +0000 UTC Normal Pod demand-backup-patroni-version-check Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-patroni-version-check to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-h2l6 default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:20 +0000 UTC Normal Pod demand-backup-patroni-version-check.spec.containers{patroni-version-check} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:29 +0000 UTC Normal Pod pg-client-6cc584874-x66rv.spec.containers{pg-client} Pulled Successfully pulled image "perconalab/percona-distribution-postgresql:15" in 16.7s (16.7s including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:29 +0000 UTC Normal Pod pg-client-6cc584874-x66rv.spec.containers{pg-client} Created Created container: pg-client kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:29 +0000 UTC Normal Pod pg-client-6cc584874-x66rv.spec.containers{pg-client} Started Started container pg-client kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:44 +0000 UTC Normal Pod demand-backup-patroni-version-check.spec.containers{patroni-version-check} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 23.765s (23.765s including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:44 +0000 UTC Normal Pod demand-backup-patroni-version-check.spec.containers{patroni-version-check} Created Created container: patroni-version-check kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:44 +0000 UTC Normal Pod demand-backup-patroni-version-check.spec.containers{patroni-version-check} Started Started container patroni-version-check kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:46 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-g6sn-pgdata WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:46 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-gfv8-pgdata WaitForFirstConsumer waiting for first consumer to be created before binding 
persistentvolume-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:46 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-gfv8-pgdata ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:46 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-gfv8-pgdata Provisioning External provisioner is provisioning volume for claim "kuttl-test-workable-tapir/demand-backup-instance1-gfv8-pgdata" pd.csi.storage.gke.io_gke-bcd11770319e4a21a7e4-8f06-ab7d-vm_098a4bb9-711c-4215-b7ea-9d51408a6e47 logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:46 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-gfv8 SuccessfulCreate create Pod demand-backup-instance1-gfv8-0 in StatefulSet demand-backup-instance1-gfv8 successful statefulset-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:46 +0000 UTC Normal Pod demand-backup-patroni-version-check.spec.containers{patroni-version-check} Killing Stopping container patroni-version-check kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:47 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-g6sn-pgdata ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:47 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-g6sn-pgdata Provisioning External provisioner is provisioning volume for claim "kuttl-test-workable-tapir/demand-backup-instance1-g6sn-pgdata" pd.csi.storage.gke.io_gke-bcd11770319e4a21a7e4-8f06-ab7d-vm_098a4bb9-711c-4215-b7ea-9d51408a6e47 logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:47 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-g6sn SuccessfulCreate create Pod demand-backup-instance1-g6sn-0 in StatefulSet demand-backup-instance1-g6sn successful statefulset-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:47 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-gj48-pgdata WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:47 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-gj48-pgdata Provisioning External provisioner is provisioning volume for claim "kuttl-test-workable-tapir/demand-backup-instance1-gj48-pgdata" pd.csi.storage.gke.io_gke-bcd11770319e4a21a7e4-8f06-ab7d-vm_098a4bb9-711c-4215-b7ea-9d51408a6e47 logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:47 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-gj48-pgdata ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:47 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-gj48 SuccessfulCreate create Pod demand-backup-instance1-gj48-0 in StatefulSet demand-backup-instance1-gj48 successful statefulset-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:47 +0000 UTC Normal Pod demand-backup-repo-host-0 Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-repo-host-0 to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-h2l6 default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:47 +0000 UTC Warning StatefulSet.apps demand-backup-repo-host FailedCreate create Pod demand-backup-repo-host-0 in StatefulSet demand-backup-repo-host failed error: pods "demand-backup-repo-host-0" is forbidden: error looking up service account kuttl-test-workable-tapir/demand-backup-pgbackrest: serviceaccount "demand-backup-pgbackrest" not found statefulset-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:47 +0000 UTC Normal StatefulSet.apps demand-backup-repo-host SuccessfulCreate create Pod demand-backup-repo-host-0 in StatefulSet demand-backup-repo-host successful statefulset-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:47 +0000 UTC Normal PostgresCluster.postgres-operator.crunchydata.com demand-backup RepoHostCreated created pgBackRest repository host StatefulSet/demand-backup-repo-host postgrescluster-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:48 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8bs7h Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-pgbouncer-65f79457f6-8bs7h to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-t674 default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:48 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8lqg5 Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-pgbouncer-65f79457f6-8lqg5 to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-h2l6 default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:48 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-fb7sf Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-pgbouncer-65f79457f6-fb7sf to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-r67m default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:48 +0000 UTC Normal ReplicaSet.apps demand-backup-pgbouncer-65f79457f6 SuccessfulCreate Created pod: demand-backup-pgbouncer-65f79457f6-8lqg5 replicaset-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:48 +0000 UTC Normal ReplicaSet.apps demand-backup-pgbouncer-65f79457f6 SuccessfulCreate Created pod: demand-backup-pgbouncer-65f79457f6-fb7sf replicaset-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:48 +0000 UTC Normal ReplicaSet.apps demand-backup-pgbouncer-65f79457f6 SuccessfulCreate Created pod: demand-backup-pgbouncer-65f79457f6-8bs7h replicaset-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:48 +0000 UTC Normal PodDisruptionBudget.policy demand-backup-pgbouncer NoPods No matching pods found controllermanager logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:48 +0000 UTC Normal Deployment.apps demand-backup-pgbouncer ScalingReplicaSet Scaled up replica set demand-backup-pgbouncer-65f79457f6 to 3 deployment-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:48 +0000 UTC Normal Pod 
demand-backup-repo-host-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:49 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8lqg5.spec.containers{pgbouncer} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbouncer" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:49 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-fb7sf.spec.containers{pgbouncer} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbouncer" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:49 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-fb7sf.spec.containers{pgbouncer} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbouncer" in 117ms (117ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:49 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-fb7sf.spec.containers{pgbouncer} Created Created container: pgbouncer kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:49 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-fb7sf.spec.containers{pgbouncer} Started Started container pgbouncer kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:49 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-fb7sf.spec.containers{pgbouncer-config} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbouncer" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:49 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-fb7sf.spec.containers{pgbouncer-config} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbouncer" in 100ms (100ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:49 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-fb7sf.spec.containers{pgbouncer-config} Created Created container: pgbouncer-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:49 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-fb7sf.spec.containers{pgbouncer-config} Started Started container pgbouncer-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:50 +0000 UTC Warning Pod demand-backup-pgbouncer-65f79457f6-8bs7h FailedMount MountVolume.SetUp failed for volume "pgbouncer-config" : failed to sync configmap cache: timed out waiting for the condition kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:50 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8bs7h.spec.containers{pgbouncer} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbouncer" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:50 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8bs7h.spec.containers{pgbouncer} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbouncer" in 121ms (121ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:50 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8bs7h.spec.containers{pgbouncer} Created Created container: pgbouncer kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:51 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-g6sn-pgdata ProvisioningSucceeded Successfully provisioned volume pvc-0bc88abb-51cd-4285-b9aa-c471b7de4b44 
pd.csi.storage.gke.io_gke-bcd11770319e4a21a7e4-8f06-ab7d-vm_098a4bb9-711c-4215-b7ea-9d51408a6e47 logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:51 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0 Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-instance1-gfv8-0 to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-h2l6 default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:51 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-gfv8-pgdata ProvisioningSucceeded Successfully provisioned volume pvc-6da2f7e3-ddd6-4e9e-aa2c-51bb0793a32a pd.csi.storage.gke.io_gke-bcd11770319e4a21a7e4-8f06-ab7d-vm_098a4bb9-711c-4215-b7ea-9d51408a6e47 logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:51 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-gj48-pgdata ProvisioningSucceeded Successfully provisioned volume pvc-8cb40a3e-e79e-4af3-9946-d2ff26b4ceed pd.csi.storage.gke.io_gke-bcd11770319e4a21a7e4-8f06-ab7d-vm_098a4bb9-711c-4215-b7ea-9d51408a6e47 logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:51 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8bs7h.spec.containers{pgbouncer} Started Started container pgbouncer kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:51 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8bs7h.spec.containers{pgbouncer-config} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbouncer" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:51 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8bs7h.spec.containers{pgbouncer-config} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbouncer" in 122ms (122ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:51 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8bs7h.spec.containers{pgbouncer-config} Created Created container: pgbouncer-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:51 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8bs7h.spec.containers{pgbouncer-config} Started Started container pgbouncer-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:52 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0 Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-instance1-g6sn-0 to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-r67m default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:52 +0000 UTC Normal Pod demand-backup-instance1-gj48-0 Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-instance1-gj48-0 to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-t674 default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:53 +0000 UTC Warning Pod demand-backup-instance1-gj48-0 FailedMount MountVolume.SetUp failed for volume "pgbackrest-server" : failed to sync secret cache: timed out waiting for the condition kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:54 +0000 UTC Warning Pod demand-backup-instance1-gj48-0 FailedMount MountVolume.SetUp failed for volume "patroni-config" : failed to sync configmap cache: timed out waiting for the condition kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:54 +0000 UTC Warning Pod demand-backup-instance1-gj48-0 FailedMount MountVolume.SetUp failed for volume "pgbackrest-config" : failed to sync secret cache: timed out waiting for the condition kubelet logger.go:42: 16:49:49 | 
demand-backup | 2025-03-14 16:26:56 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-0bc88abb-51cd-4285-b9aa-c471b7de4b44" attachdetach-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:59 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{postgres-startup} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:59 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 120ms (120ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:59 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:59 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:26:59 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-6da2f7e3-ddd6-4e9e-aa2c-51bb0793a32a" attachdetach-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:00 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:00 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 141ms (141ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:00 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:00 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:00 +0000 UTC Normal Pod demand-backup-instance1-gj48-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8cb40a3e-e79e-4af3-9946-d2ff26b4ceed" attachdetach-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{database} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{database} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 114ms (114ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{database} Started Started container database kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod 
demand-backup-instance1-g6sn-0.spec.containers{replication-cert-copy} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 132ms (132ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 119ms (119ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest-config} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{postgres-startup} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 200ms (200ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:01 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:02 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest-config} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 121ms (121ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:02 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest-config} Created Created container: pgbackrest-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:02 +0000 UTC Normal Pod 
demand-backup-instance1-g6sn-0.spec.containers{pgbackrest-config} Started Started container pgbackrest-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:02 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{postgres-startup} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:02 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 325ms (325ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:02 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:02 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:02 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:02 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 152ms (152ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:02 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:02 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:03 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{database} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:03 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{database} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 145ms (145ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:03 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:03 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{database} Started Started container database kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:03 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{replication-cert-copy} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:03 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 137ms (137ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:03 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{replication-cert-copy} Created 
Created container: replication-cert-copy kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:03 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:03 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:03 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 117ms (117ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:03 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:03 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:03 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest-config} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:04 +0000 UTC Warning Pod demand-backup-instance1-g6sn-0.spec.containers{database} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:04 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:04 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest-config} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 129ms (129ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:05 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 238ms (238ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:05 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:05 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:07 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{database} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:08 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{database} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 259ms (259ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:08 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 16:49:49 | 
demand-backup | 2025-03-14 16:27:08 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{database} Started Started container database kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:08 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{replication-cert-copy} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:08 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 293ms (293ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:08 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:08 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:08 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:09 +0000 UTC Warning PostgresCluster.postgres-operator.crunchydata.com demand-backup UnableToCreateStanzas command terminated with exit code 50:
2025-03-14 16:27:09.432 P00 ERROR: [050]: unable to acquire lock on file '/tmp/pgbackrest/db-archive-1.lock': Resource temporarily unavailable HINT: is another pgBackRest process running?
2025-03-14 16:27:09.442 P00 ERROR: [050]: unable to acquire lock on file '/tmp/pgbackrest/db-archive-1.lock': Resource temporarily unavailable HINT: is another pgBackRest process running?
postgrescluster-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:12 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 3.328s (3.328s including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:12 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:12 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:12 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest-config} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:12 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 23.697s (23.697s including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:12 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:12 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:14 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest-config} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 1.804s (1.804s including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:14 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest-config} Created Created container: pgbackrest-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:14 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8lqg5.spec.containers{pgbouncer} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbouncer" in 25.747s (25.747s including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:14 +0000 UTC Normal PostgresCluster.postgres-operator.crunchydata.com demand-backup StanzasCreated pgBackRest stanza creation completed successfully postgrescluster-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:15 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest-config} Started Started container pgbackrest-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:15 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8lqg5.spec.containers{pgbouncer} Created Created container: pgbouncer kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:16 +0000 UTC Warning Pod demand-backup-instance1-gfv8-0.spec.containers{database} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:16 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8lqg5.spec.containers{pgbouncer} Started Started container pgbouncer kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:17 +0000 UTC Normal Pod 
demand-backup-pgbouncer-65f79457f6-8lqg5.spec.containers{pgbouncer-config} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbouncer" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:17 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8lqg5.spec.containers{pgbouncer-config} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbouncer" in 293ms (293ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:17 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8lqg5.spec.containers{pgbouncer-config} Created Created container: pgbouncer-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:17 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8lqg5.spec.containers{pgbouncer-config} Started Started container pgbouncer-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:17 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:18 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 182ms (182ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:18 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:18 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:18 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest-config} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:18 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest-config} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 201ms (201ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:18 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest-config} Created Created container: pgbackrest-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:18 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest-config} Started Started container pgbackrest-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:19 +0000 UTC Normal Pod demand-backup-backup-48kl-cmzdg Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-backup-48kl-cmzdg to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-r67m default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:19 +0000 UTC Normal Job.batch demand-backup-backup-48kl SuccessfulCreate Created pod: demand-backup-backup-48kl-cmzdg job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:20 +0000 UTC Normal Pod demand-backup-backup-48kl-cmzdg.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:20 +0000 UTC Normal Pod demand-backup-backup-48kl-cmzdg.spec.containers{pgbackrest} Pulled Successfully pulled image 
"perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 120ms (120ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:20 +0000 UTC Normal Pod demand-backup-backup-48kl-cmzdg.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:27:20 +0000 UTC Normal Pod demand-backup-backup-48kl-cmzdg.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:28:34 +0000 UTC Normal Job.batch demand-backup-backup-48kl Completed Job completed job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:28:49 +0000 UTC Normal Pod demand-backup-backup-lksw-92wn8 Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-backup-lksw-92wn8 to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-r67m default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:28:49 +0000 UTC Normal Pod demand-backup-backup-lksw-92wn8.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:28:49 +0000 UTC Normal Pod demand-backup-backup-lksw-92wn8.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 112ms (112ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:28:49 +0000 UTC Normal Pod demand-backup-backup-lksw-92wn8.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:28:49 +0000 UTC Normal Pod demand-backup-backup-lksw-92wn8.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:28:49 +0000 UTC Normal Job.batch demand-backup-backup-lksw SuccessfulCreate Created pod: demand-backup-backup-lksw-92wn8 job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:31:57 +0000 UTC Normal Job.batch demand-backup-backup-lksw Completed Job completed job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:32:19 +0000 UTC Normal Pod demand-backup-backup-7mnx-bzsqs Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-backup-7mnx-bzsqs to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-r67m default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:32:19 +0000 UTC Normal Job.batch demand-backup-backup-7mnx SuccessfulCreate Created pod: demand-backup-backup-7mnx-bzsqs job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:32:20 +0000 UTC Normal Pod demand-backup-backup-7mnx-bzsqs.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:32:20 +0000 UTC Normal Pod demand-backup-backup-7mnx-bzsqs.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 122ms (122ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:32:20 +0000 UTC Normal Pod demand-backup-backup-7mnx-bzsqs.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:32:20 +0000 UTC Normal Pod demand-backup-backup-7mnx-bzsqs.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 16:49:49 | 
demand-backup | 2025-03-14 16:33:18 +0000 UTC Normal Job.batch demand-backup-backup-7mnx Completed Job completed job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:33:44 +0000 UTC Normal Pod demand-backup-backup-6flj-g2ds8 Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-backup-6flj-g2ds8 to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-r67m default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:33:44 +0000 UTC Normal Job.batch demand-backup-backup-6flj SuccessfulCreate Created pod: demand-backup-backup-6flj-g2ds8 job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:33:45 +0000 UTC Normal Pod demand-backup-backup-6flj-g2ds8.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:33:45 +0000 UTC Normal Pod demand-backup-backup-6flj-g2ds8.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 125ms (125ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:33:45 +0000 UTC Normal Pod demand-backup-backup-6flj-g2ds8.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:33:45 +0000 UTC Normal Pod demand-backup-backup-6flj-g2ds8.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:35:07 +0000 UTC Normal Job.batch demand-backup-backup-6flj Completed Job completed job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:35:32 +0000 UTC Normal Pod demand-backup-backup-gdfg-rxqbz Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-backup-gdfg-rxqbz to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-r67m default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:35:32 +0000 UTC Normal Pod demand-backup-backup-gdfg-rxqbz.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:35:32 +0000 UTC Normal Pod demand-backup-backup-gdfg-rxqbz.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 120ms (120ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:35:32 +0000 UTC Normal Pod demand-backup-backup-gdfg-rxqbz.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:35:32 +0000 UTC Normal Pod demand-backup-backup-gdfg-rxqbz.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:35:32 +0000 UTC Normal Job.batch demand-backup-backup-gdfg SuccessfulCreate Created pod: demand-backup-backup-gdfg-rxqbz job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:36:33 +0000 UTC Normal Job.batch demand-backup-backup-gdfg Completed Job completed job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:36:57 +0000 UTC Normal Pod demand-backup-backup-8njh-7kljz Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-backup-8njh-7kljz to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-r67m default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:36:57 +0000 UTC Normal Job.batch 
demand-backup-backup-8njh SuccessfulCreate Created pod: demand-backup-backup-8njh-7kljz job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:36:58 +0000 UTC Normal Pod demand-backup-backup-8njh-7kljz.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:36:58 +0000 UTC Normal Pod demand-backup-backup-8njh-7kljz.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 119ms (119ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:36:58 +0000 UTC Normal Pod demand-backup-backup-8njh-7kljz.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:36:58 +0000 UTC Normal Pod demand-backup-backup-8njh-7kljz.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:38:43 +0000 UTC Normal Job.batch demand-backup-backup-8njh Completed Job completed job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:39:07 +0000 UTC Normal Pod demand-backup-backup-bj6v-56dkj Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-backup-bj6v-56dkj to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-r67m default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:39:07 +0000 UTC Normal Pod demand-backup-backup-bj6v-56dkj.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:39:07 +0000 UTC Normal Job.batch demand-backup-backup-bj6v SuccessfulCreate Created pod: demand-backup-backup-bj6v-56dkj job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:39:08 +0000 UTC Normal Pod demand-backup-backup-bj6v-56dkj.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 127ms (127ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:39:08 +0000 UTC Normal Pod demand-backup-backup-bj6v-56dkj.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:39:08 +0000 UTC Normal Pod demand-backup-backup-bj6v-56dkj.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:40:32 +0000 UTC Normal Job.batch demand-backup-backup-bj6v Completed Job completed job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:40:56 +0000 UTC Normal Pod demand-backup-backup-zs82-7wkgm Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-backup-zs82-7wkgm to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-r67m default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:40:56 +0000 UTC Normal Job.batch demand-backup-backup-zs82 SuccessfulCreate Created pod: demand-backup-backup-zs82-7wkgm job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:40:57 +0000 UTC Normal Pod demand-backup-backup-zs82-7wkgm.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:40:57 +0000 UTC Normal Pod demand-backup-backup-zs82-7wkgm.spec.containers{pgbackrest} Pulled Successfully pulled image 
"perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 129ms (129ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:40:57 +0000 UTC Normal Pod demand-backup-backup-zs82-7wkgm.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:40:57 +0000 UTC Normal Pod demand-backup-backup-zs82-7wkgm.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:42:33 +0000 UTC Normal Job.batch demand-backup-backup-zs82 Completed Job completed job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:24 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{database} Killing Stopping container database kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:24 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest} Killing Stopping container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:24 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest-config} Killing Stopping container pgbackrest-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:24 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{database} Killing Stopping container database kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:24 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest-config} Killing Stopping container pgbackrest-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:24 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest} Killing Stopping container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:24 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{database} Killing Stopping container database kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:24 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest-config} Killing Stopping container pgbackrest-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:24 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest} Killing Stopping container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:28 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-2tx5q Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-pgbackrest-restore-2tx5q to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-r67m default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:28 +0000 UTC Normal Job.batch demand-backup-pgbackrest-restore SuccessfulCreate Created pod: demand-backup-pgbackrest-restore-2tx5q job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:28 +0000 UTC Normal PodDisruptionBudget.policy demand-backup-set-instance1 NoPods No matching pods found controllermanager logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:39 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-2tx5q.spec.initContainers{nss-wrapper-init} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:39 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-2tx5q.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 155ms (155ms 
including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:39 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-2tx5q.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:40 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-2tx5q.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:41 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-2tx5q.spec.containers{pgbackrest-restore} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:41 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-2tx5q.spec.containers{pgbackrest-restore} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 116ms (116ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:41 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-2tx5q.spec.containers{pgbackrest-restore} Created Created container: pgbackrest-restore kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:41 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-2tx5q.spec.containers{pgbackrest-restore} Started Started container pgbackrest-restore kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:52 +0000 UTC Normal Job.batch demand-backup-pgbackrest-restore Completed Job completed job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:53 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0 Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-instance1-g6sn-0 to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-r67m default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:53 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-g6sn SuccessfulCreate create Pod demand-backup-instance1-g6sn-0 in StatefulSet demand-backup-instance1-g6sn successful statefulset-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:59 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{postgres-startup} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:59 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 112ms (112ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:59 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:43:59 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:00 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:00 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 118ms (118ms 
including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:00 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:00 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{database} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{database} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 116ms (116ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{database} Started Started container database kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{replication-cert-copy} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 152ms (152ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 113ms (113ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest-config} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:02 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest-config} Pulled Successfully pulled 
image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 127ms (127ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:02 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest-config} Created Created container: pgbackrest-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:02 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest-config} Started Started container pgbackrest-config kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:05 +0000 UTC Normal Pod demand-backup-backup-kxkg-tw2zx Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-backup-kxkg-tw2zx to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-t674 default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:05 +0000 UTC Normal Pod demand-backup-backup-kxkg-tw2zx.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:05 +0000 UTC Normal Job.batch demand-backup-backup-kxkg SuccessfulCreate Created pod: demand-backup-backup-kxkg-tw2zx job-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:06 +0000 UTC Normal Pod demand-backup-backup-kxkg-tw2zx.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 125ms (125ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:06 +0000 UTC Normal Pod demand-backup-backup-kxkg-tw2zx.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:06 +0000 UTC Normal Pod demand-backup-backup-kxkg-tw2zx.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:07 +0000 UTC Normal Pod demand-backup-backup-kxkg-tw2zx.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 147ms (147ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:08 +0000 UTC Warning Pod demand-backup-backup-kxkg-tw2zx.spec.containers{pgbackrest} BackOff Back-off restarting failed container pgbackrest in pod demand-backup-backup-kxkg-tw2zx_kuttl-test-workable-tapir(1eb09192-1f4f-4023-985b-01cdeccee9d3) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:08 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0 Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-instance1-gfv8-0 to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-h2l6 default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:08 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-gfv8 SuccessfulCreate create Pod demand-backup-instance1-gfv8-0 in StatefulSet demand-backup-instance1-gfv8 successful statefulset-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:08 +0000 UTC Normal Pod demand-backup-instance1-gj48-0 Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-instance1-gj48-0 to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-t674 default-scheduler logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:08 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-gj48 SuccessfulCreate create Pod demand-backup-instance1-gj48-0 in StatefulSet demand-backup-instance1-gj48 successful 
statefulset-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:15 +0000 UTC Normal Pod demand-backup-instance1-gj48-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8cb40a3e-e79e-4af3-9946-d2ff26b4ceed" attachdetach-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:16 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-6da2f7e3-ddd6-4e9e-aa2c-51bb0793a32a" attachdetach-controller logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:17 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{postgres-startup} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:18 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 145ms (145ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:18 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:18 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:18 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:18 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 128ms (128ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:18 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:18 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:18 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{postgres-startup} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:18 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 152ms (152ms including waiting) kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:18 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:18 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:19 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{database} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet 
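The BackOff event on demand-backup-backup-kxkg-tw2zx at 16:44:08 above is the only crash-looping container in this run, and the event stream alone does not show why the pgbackrest container exited. A minimal triage sketch, assuming kubectl access to the same cluster; the namespace and pod names are taken from the events above, and these are generic kubectl calls rather than part of the test harness:

# Names taken from the events above (assumptions if re-run elsewhere).
NAMESPACE=kuttl-test-workable-tapir
POD=demand-backup-backup-kxkg-tw2zx
# Show container state, exit code and restart count.
kubectl -n "$NAMESPACE" describe pod "$POD"
# Logs of the previous (failed) attempt of the pgbackrest container.
kubectl -n "$NAMESPACE" logs "$POD" -c pgbackrest --previous
# All events recorded for this pod, in chronological order.
kubectl -n "$NAMESPACE" get events --field-selector involvedObject.name="$POD" --sort-by=.lastTimestamp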
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:19 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{database} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 133ms (134ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:19 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{database} Created Created container: database kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:19 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:19 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 142ms (142ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:19 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:19 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-backup-kxkg-tw2zx.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 135ms (135ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{database} Started Started container database kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{replication-cert-copy} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 147ms (147ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 127ms (127ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest-config} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest-config} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 131ms (131ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest-config} Created Created container: pgbackrest-config kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest-config} Started Started container pgbackrest-config kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{database} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{database} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 158ms (158ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{database} Created Created container: database kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{database} Started Started container database kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{replication-cert-copy} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 140ms (140ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:20 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:21 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:21 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:21 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 177ms (177ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:21 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:21 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:21 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest-config} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:21 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest-config} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 124ms (124ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:21 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest-config} Created Created container: pgbackrest-config kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:21 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest-config} Started Started container pgbackrest-config kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:22 +0000 UTC Warning Pod demand-backup-instance1-gfv8-0.spec.containers{database} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:23 +0000 UTC Warning Pod demand-backup-instance1-gj48-0.spec.containers{database} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:44:46 +0000 UTC Normal Pod demand-backup-backup-kxkg-tw2zx.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 155ms (155ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:45:27 +0000 UTC Normal Pod demand-backup-backup-kxkg-tw2zx.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 139ms (139ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:45:46 +0000 UTC Normal Job.batch demand-backup-backup-kxkg Completed Job completed job-controller
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:01 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{database} Killing Stopping container database kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:06 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cnhqr Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-pgbackrest-restore-cnhqr to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-r67m default-scheduler
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:06 +0000 UTC Normal Job.batch demand-backup-pgbackrest-restore SuccessfulCreate Created pod: demand-backup-pgbackrest-restore-cnhqr job-controller
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:12 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cnhqr.spec.initContainers{nss-wrapper-init} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:12 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cnhqr.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 124ms (124ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:12 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cnhqr.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:12 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cnhqr.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:13 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cnhqr.spec.containers{pgbackrest-restore} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:13 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cnhqr.spec.containers{pgbackrest-restore} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 112ms (112ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:13 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cnhqr.spec.containers{pgbackrest-restore} Created Created container: pgbackrest-restore kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:13 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cnhqr.spec.containers{pgbackrest-restore} Started Started container pgbackrest-restore kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:25 +0000 UTC Normal Job.batch demand-backup-pgbackrest-restore Completed Job completed job-controller
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:26 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0 Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-instance1-g6sn-0 to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-r67m default-scheduler
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:26 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-g6sn SuccessfulCreate create Pod demand-backup-instance1-g6sn-0 in StatefulSet demand-backup-instance1-g6sn successful statefulset-controller
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:32 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{postgres-startup} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:32 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 111ms (111ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:32 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:32 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:33 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:33 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 112ms (112ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:33 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:33 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:34 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{database} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:34 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{database} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 117ms (117ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:34 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{database} Created Created container: database kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:34 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{database} Started Started container database kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:34 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{replication-cert-copy} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:35 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 133ms (133ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:35 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:35 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:35 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:35 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 132ms (132ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:35 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:35 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:35 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest-config} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:35 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest-config} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 141ms (141ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:35 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest-config} Created Created container: pgbackrest-config kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:35 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{pgbackrest-config} Started Started container pgbackrest-config kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:40 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0 Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-instance1-gfv8-0 to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-t674 default-scheduler
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:40 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-gfv8 SuccessfulCreate create Pod demand-backup-instance1-gfv8-0 in StatefulSet demand-backup-instance1-gfv8 successful statefulset-controller
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:41 +0000 UTC Normal Pod demand-backup-instance1-gj48-0 Binding Scheduled Successfully assigned kuttl-test-workable-tapir/demand-backup-instance1-gj48-0 to gke-jen-pg-1083-44dc2e61-default-pool-01f980ad-h2l6 default-scheduler
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:41 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-gj48 SuccessfulCreate create Pod demand-backup-instance1-gj48-0 in StatefulSet demand-backup-instance1-gj48 successful statefulset-controller
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:48 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-6da2f7e3-ddd6-4e9e-aa2c-51bb0793a32a" attachdetach-controller
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:48 +0000 UTC Normal Pod demand-backup-instance1-gj48-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-8cb40a3e-e79e-4af3-9946-d2ff26b4ceed" attachdetach-controller
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:51 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{postgres-startup} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:51 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{postgres-startup} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:51 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 136ms (136ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:51 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:51 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:52 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 135ms (135ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:52 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:52 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:52 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:52 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 151ms (151ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:52 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:52 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 128ms (128ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:52 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:52 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{database} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{database} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 156ms (156ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{database} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{database} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 124ms (124ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{database} Created Created container: database kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{database} Started Started container database kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{replication-cert-copy} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 134ms (134ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 123ms (123ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:53 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest-config} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{database} Created Created container: database kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{database} Started Started container database kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{replication-cert-copy} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-postgres" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-postgres" in 148ms (148ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 131ms (131ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest-config} Pulling Pulling image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest-config} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 144ms (144ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest-config} Created Created container: pgbackrest-config kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest-config} Pulled Successfully pulled image "perconalab/percona-postgresql-operator:main-ppg17-pgbackrest" in 117ms (117ms including waiting) kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest-config} Created Created container: pgbackrest-config kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:54 +0000 UTC Normal Pod demand-backup-instance1-gj48-0.spec.containers{pgbackrest-config} Started Started container pgbackrest-config kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:55 +0000 UTC Normal Pod demand-backup-instance1-gfv8-0.spec.containers{pgbackrest-config} Started Started container pgbackrest-config kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:56 +0000 UTC Warning Pod demand-backup-instance1-gj48-0.spec.containers{database} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:46:58 +0000 UTC Warning Pod demand-backup-instance1-gfv8-0.spec.containers{database} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:49:25 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-gfv8 SuccessfulDelete delete Pod demand-backup-instance1-gfv8-0 in StatefulSet demand-backup-instance1-gfv8 successful statefulset-controller
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:49:25 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-gj48 SuccessfulDelete delete Pod demand-backup-instance1-gj48-0 in StatefulSet demand-backup-instance1-gj48 successful statefulset-controller
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:49:26 +0000 UTC Normal Pod demand-backup-instance1-g6sn-0.spec.containers{database} Killing Stopping container database kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:49:26 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-g6sn SuccessfulDelete delete Pod demand-backup-instance1-g6sn-0 in StatefulSet demand-backup-instance1-g6sn successful statefulset-controller
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:49:30 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8bs7h.spec.containers{pgbouncer} Killing Stopping container pgbouncer kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:49:30 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8bs7h.spec.containers{pgbouncer-config} Killing Stopping container pgbouncer-config kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:49:30 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8lqg5.spec.containers{pgbouncer} Killing Stopping container pgbouncer kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:49:30 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-8lqg5.spec.containers{pgbouncer-config} Killing Stopping container pgbouncer-config kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:49:30 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-fb7sf.spec.containers{pgbouncer} Killing Stopping container pgbouncer kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:49:30 +0000 UTC Normal Pod demand-backup-pgbouncer-65f79457f6-fb7sf.spec.containers{pgbouncer-config} Killing Stopping container pgbouncer-config kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:49:30 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest-config} Killing Stopping container pgbackrest-config kubelet
logger.go:42: 16:49:49 | demand-backup | 2025-03-14 16:49:30 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest} Killing Stopping container pgbackrest kubelet
logger.go:42: 16:49:50 | demand-backup | Deleting namespace: kuttl-test-workable-tapir
=== NAME kuttl
harness.go:407: run tests finished
harness.go:515: cleaning up
harness.go:572: removing temp folder: ""
--- PASS: kuttl (1474.26s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/demand-backup (1473.82s)
PASS