=== RUN kuttl
harness.go:460: starting setup
harness.go:258: running tests using configured kubeconfig.
harness.go:281: Successful connection to cluster at: https://34.58.166.253
harness.go:366: running tests
harness.go:77: going to run test suite with timeout of 600 seconds for each step
harness.go:378: testsuite: e2e-tests/tests has 32 tests
=== RUN kuttl/harness
=== RUN kuttl/harness/demand-backup
=== PAUSE kuttl/harness/demand-backup
=== CONT kuttl/harness/demand-backup
logger.go:42: 17:20:10 | demand-backup | Creating namespace "kuttl-test-enough-midge"
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | starting test step 0-deploy-operator
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
init_temp_dir # do this only in the first TestStep
deploy_operator
deploy_client
deploy_s3_secrets]
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | + source ../../functions
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ realpath ../../..
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++ CERT_MANAGER_VER=1.19.1
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ pwd
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/tests/demand-backup
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++ test_name=demand-backup
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/vars.sh
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ git rev-parse --abbrev-ref HEAD
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export GIT_BRANCH=PR-1233
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ GIT_BRANCH=PR-1233
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export VERSION=PR-1233-8d8fb2084
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ VERSION=PR-1233-8d8fb2084
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export SKIP_TEST_WARNINGS=true
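The xtrace output above and below comes from sourcing ../../functions, which in turn sources e2e-tests/vars.sh. Reduced to what this test actually uses, the setup is roughly the following. This is a sketch reconstructed from the trace, not the actual vars.sh:

    # Reconstructed from the trace; the real e2e-tests/vars.sh may differ in detail.
    ROOT_REPO=$(realpath ../../..)          # the Jenkins workspace checkout in this run
    test_name=$(basename "$(pwd)")          # "demand-backup" for this kuttl test directory
    export DEPLOY_DIR="${ROOT_REPO}/deploy"
    export TESTS_DIR="${ROOT_REPO}/e2e-tests"
    export TESTS_CONFIG_DIR="${TESTS_DIR}/conf"
    export TEMP_DIR="/tmp/kuttl/pg/${test_name}"
    export GIT_BRANCH=$(git rev-parse --abbrev-ref HEAD)

Every path the later steps reference (TEMP_DIR for the generated cr.yaml, DEPLOY_DIR for crd.yaml, cw-rbac.yaml and cw-operator.yaml, TESTS_CONFIG_DIR for client.yaml) is derived from these values.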
logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ SKIP_TEST_WARNINGS=true logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ command -v oc logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ ! -n '' ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ -n '' ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export PG_VER=18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ PG_VER=18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export BUCKET=pg-operator-testing logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ BUCKET=pg-operator-testing logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export PGOV1_TAG=1.4.0 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ PGOV1_TAG=1.4.0 logger.go:42: 17:20:10 | 
demand-backup/0-deploy-operator | +++ export PGOV1_VER=14 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ PGOV1_VER=14 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export MINIO_VER=5.4.0 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ MINIO_VER=5.4.0 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export REGISTRY_NAME=docker.io logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ REGISTRY_NAME=docker.io logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ printenv logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ grep -E '^IMAGE' logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ awk -F= '{print $1}' logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 17:20:10 
| demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE' logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ var_value=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == docker.io/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == percona/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == perconalab/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 17:20:10 | 
demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export 
IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ 
new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:20:10 | 
demand-backup/0-deploy-operator | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ which gdate logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ which date logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ date=/usr/sbin/date logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ which gsed logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++++ which sed logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | +++ sed=/usr/sbin/sed logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | ++ oc get projects logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | + init_temp_dir logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | + rm -rf /tmp/kuttl/pg/demand-backup logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | + mkdir -p /tmp/kuttl/pg/demand-backup logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | + deploy_operator logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | + local cw_prefix= logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | + destroy_operator logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | + kubectl -n pg-operator delete deployment percona-postgresql-operator --force --grace-period=0 logger.go:42: 17:20:10 | demand-backup/0-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 17:20:11 | demand-backup/0-deploy-operator | Error from server (NotFound): deployments.apps "percona-postgresql-operator" not found logger.go:42: 17:20:11 | demand-backup/0-deploy-operator | + true logger.go:42: 17:20:11 | demand-backup/0-deploy-operator | + [[ -n pg-operator ]] logger.go:42: 17:20:11 | demand-backup/0-deploy-operator | + kubectl delete namespace pg-operator --force --grace-period=0 logger.go:42: 17:20:11 | demand-backup/0-deploy-operator | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. 
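The repeated IMAGE_* blocks traced above are a single normalization loop that pins every image reference to an explicit registry before anything is deployed. A minimal sketch of what the trace shows, assuming the loop lives in the sourced functions/vars.sh files (the verbatim helper may differ):

    # Prefix docker.io/ onto percona/ and perconalab/ images that are not already
    # fully qualified; leave anything else untouched. Reconstructed from the xtrace above.
    for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}'); do
        var_value=$(eval "echo \$${var}")
        if [[ ${var_value} == docker.io/* ]]; then
            continue                                   # already pinned to a registry
        fi
        if [[ ${var_value} == percona/* ]] || [[ ${var_value} == perconalab/* ]]; then
            new_value="docker.io/${var_value}"
            eval "export ${var}=${new_value}"
            echo "${var}=${new_value}"                 # matches the IMAGE_*=docker.io/... lines in the log
        fi
    done

This is why every later kubectl and yq invocation in the log refers to docker.io/perconalab/... images even though vars.sh sets them without a registry prefix.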
logger.go:42: 17:20:11 | demand-backup/0-deploy-operator | Error from server (NotFound): namespaces "pg-operator" not found logger.go:42: 17:20:11 | demand-backup/0-deploy-operator | + true logger.go:42: 17:20:11 | demand-backup/0-deploy-operator | + [[ -n pg-operator ]] logger.go:42: 17:20:11 | demand-backup/0-deploy-operator | + create_namespace pg-operator logger.go:42: 17:20:11 | demand-backup/0-deploy-operator | + local namespace=pg-operator logger.go:42: 17:20:11 | demand-backup/0-deploy-operator | + [[ -n '' ]] logger.go:42: 17:20:11 | demand-backup/0-deploy-operator | + kubectl delete namespace pg-operator --ignore-not-found logger.go:42: 17:20:12 | demand-backup/0-deploy-operator | + kubectl wait --for=delete namespace pg-operator logger.go:42: 17:20:12 | demand-backup/0-deploy-operator | + kubectl create namespace pg-operator logger.go:42: 17:20:13 | demand-backup/0-deploy-operator | namespace/pg-operator created logger.go:42: 17:20:13 | demand-backup/0-deploy-operator | + cw_prefix=cw- logger.go:42: 17:20:13 | demand-backup/0-deploy-operator | + kubectl -n pg-operator apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy/crd.yaml logger.go:42: 17:20:14 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/crunchybridgeclusters.postgres-operator.crunchydata.com serverside-applied logger.go:42: 17:20:14 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconapgbackups.pgv2.percona.com serverside-applied logger.go:42: 17:20:16 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconapgclusters.pgv2.percona.com serverside-applied logger.go:42: 17:20:16 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconapgrestores.pgv2.percona.com serverside-applied logger.go:42: 17:20:16 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/perconapgupgrades.pgv2.percona.com serverside-applied logger.go:42: 17:20:17 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/pgadmins.postgres-operator.crunchydata.com serverside-applied logger.go:42: 17:20:17 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/pgupgrades.postgres-operator.crunchydata.com serverside-applied logger.go:42: 17:20:19 | demand-backup/0-deploy-operator | customresourcedefinition.apiextensions.k8s.io/postgresclusters.postgres-operator.crunchydata.com serverside-applied logger.go:42: 17:20:19 | demand-backup/0-deploy-operator | + kubectl -n pg-operator apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy/cw-rbac.yaml logger.go:42: 17:20:20 | demand-backup/0-deploy-operator | serviceaccount/percona-postgresql-operator serverside-applied logger.go:42: 17:20:20 | demand-backup/0-deploy-operator | clusterrole.rbac.authorization.k8s.io/percona-postgresql-operator serverside-applied logger.go:42: 17:20:20 | demand-backup/0-deploy-operator | clusterrolebinding.rbac.authorization.k8s.io/percona-postgresql-operator serverside-applied logger.go:42: 17:20:20 | demand-backup/0-deploy-operator | + local disable_telemetry=true logger.go:42: 17:20:20 | demand-backup/0-deploy-operator | + '[' demand-backup == telemetry-transfer ']' logger.go:42: 17:20:20 | demand-backup/0-deploy-operator | + yq eval '.spec.template.spec.containers[0].image = "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084"' 
/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy/cw-operator.yaml logger.go:42: 17:20:20 | demand-backup/0-deploy-operator | + yq eval '(.spec.template.spec.containers[] | select(.name=="operator") | .env[] | select(.name=="DISABLE_TELEMETRY") | .value) = "true"' - logger.go:42: 17:20:20 | demand-backup/0-deploy-operator | + yq eval '(.spec.template.spec.containers[] | select(.name=="operator") | .env[] | select(.name=="LOG_LEVEL") | .value) = "DEBUG"' - logger.go:42: 17:20:20 | demand-backup/0-deploy-operator | + yq eval '(.spec.template.spec.containers[] | select(.name=="operator") | .env[] | select(.name=="PGO_FEATURE_GATES") | .value) = ""' - logger.go:42: 17:20:20 | demand-backup/0-deploy-operator | + kubectl -n pg-operator apply -f - logger.go:42: 17:20:21 | demand-backup/0-deploy-operator | deployment.apps/percona-postgresql-operator created logger.go:42: 17:20:21 | demand-backup/0-deploy-operator | + deploy_client logger.go:42: 17:20:21 | demand-backup/0-deploy-operator | + kubectl -n kuttl-test-enough-midge apply -f /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf/client.yaml logger.go:42: 17:20:23 | demand-backup/0-deploy-operator | deployment.apps/pg-client created logger.go:42: 17:20:23 | demand-backup/0-deploy-operator | + deploy_s3_secrets logger.go:42: 17:20:23 | demand-backup/0-deploy-operator | + set +o xtrace logger.go:42: 17:20:23 | demand-backup/0-deploy-operator | secret/demand-backup-pgbackrest-secrets created logger.go:42: 17:20:24 | demand-backup/0-deploy-operator | running command: [sh -c kubectl assert exist-enhanced deployment percona-postgresql-operator -n ${OPERATOR_NS:-$NAMESPACE} --field-selector status.readyReplicas=1] logger.go:42: 17:20:24 | demand-backup/0-deploy-operator | ASSERT deployment percona-postgresql-operator matching field criteria 'status.readyReplicas=1' should exist. logger.go:42: 17:20:25 | demand-backup/0-deploy-operator | INFO Found 1 resource(s). logger.go:42: 17:20:25 | demand-backup/0-deploy-operator | NAME NAMESPACE COL0 logger.go:42: 17:20:25 | demand-backup/0-deploy-operator | percona-postgresql-operator pg-operator 1 logger.go:42: 17:20:25 | demand-backup/0-deploy-operator | ASSERT PASS logger.go:42: 17:20:25 | demand-backup/0-deploy-operator | test step completed 0-deploy-operator logger.go:42: 17:20:25 | demand-backup/1-create-cluster | starting test step 1-create-cluster logger.go:42: 17:20:25 | demand-backup/1-create-cluster | running command: [sh -c set -o errexit set -o xtrace source ../../functions get_cr "demand-backup" ${RANDOM} \ | yq '.metadata.finalizers=["percona.com/delete-backups"]' \ | yq '.spec.backups.pgbackrest.global.log-level-console="debug"' \ | yq '.spec.backups.pgbackrest.global.repo1-retention-full="2"' \ | yq '.spec.backups.pgbackrest.global.repo1-retention-full-type="count"' \ | yq '.spec.backups.pgbackrest.global.repo3-retention-full="2"' \ | yq '.spec.backups.pgbackrest.global.repo3-retention-full-type="count"' \ | yq '.spec.backups.pgbackrest.jobs.backoffLimit=20' \ | yq '.spec.backups.pgbackrest.jobs.restartPolicy="OnFailure"' \ | kubectl -n "${NAMESPACE}" apply -f -] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + source ../../functions logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ realpath ../../.. 
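Step 0 above ends with the operator running in the pg-operator namespace. Condensed from the traced commands, the deploy_operator path amounts to the sequence below; this is a sketch of the commands as they appear in the log, and the real helper in e2e-tests/functions wraps them with extra error handling:

    # Reconstructed from the step-0 trace above.
    kubectl -n pg-operator delete deployment percona-postgresql-operator --force --grace-period=0 || true
    kubectl delete namespace pg-operator --force --grace-period=0 || true
    kubectl delete namespace pg-operator --ignore-not-found
    kubectl wait --for=delete namespace pg-operator
    kubectl create namespace pg-operator
    kubectl -n pg-operator apply --server-side --force-conflicts -f "${DEPLOY_DIR}/crd.yaml"
    kubectl -n pg-operator apply --server-side --force-conflicts -f "${DEPLOY_DIR}/cw-rbac.yaml"
    yq eval ".spec.template.spec.containers[0].image = \"${IMAGE}\"" "${DEPLOY_DIR}/cw-operator.yaml" \
        | yq eval '(.spec.template.spec.containers[] | select(.name=="operator") | .env[] | select(.name=="DISABLE_TELEMETRY") | .value) = "true"' - \
        | yq eval '(.spec.template.spec.containers[] | select(.name=="operator") | .env[] | select(.name=="LOG_LEVEL") | .value) = "DEBUG"' - \
        | yq eval '(.spec.template.spec.containers[] | select(.name=="operator") | .env[] | select(.name=="PGO_FEATURE_GATES") | .value) = ""' - \
        | kubectl -n pg-operator apply -f -
    # deploy_client and deploy_s3_secrets then apply conf/client.yaml and the
    # demand-backup-pgbackrest-secrets secret into the test namespace.

The kubectl assert at the end of the step simply waits for status.readyReplicas=1 on that deployment before the harness moves on to 1-create-cluster, whose trace continues below.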
logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ pwd logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/tests/demand-backup logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++ test_name=demand-backup logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/vars.sh logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export GIT_BRANCH=PR-1233 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ GIT_BRANCH=PR-1233 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export VERSION=PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ VERSION=PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ SKIP_TEST_WARNINGS=true logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ command -v oc logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ ! 
-n '' ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ -n '' ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export PG_VER=18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ PG_VER=18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export BUCKET=pg-operator-testing logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ BUCKET=pg-operator-testing logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export PGOV1_TAG=1.4.0 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ PGOV1_TAG=1.4.0 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export PGOV1_VER=14 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ PGOV1_VER=14 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export MINIO_VER=5.4.0 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ MINIO_VER=5.4.0 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export REGISTRY_NAME=docker.io logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ REGISTRY_NAME=docker.io logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ printenv logger.go:42: 17:20:25 | 
demand-backup/1-create-cluster | ++++ grep -E '^IMAGE' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ awk -F= '{print $1}' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | 
perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ var_value=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == docker.io/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == percona/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == perconalab/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:25 | demand-backup/1-create-cluster | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo 
docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:20:25 | demand-backup/1-create-cluster | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | 
awk -F'=' '{print $1}') logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:25 | demand-backup/1-create-cluster | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 
logger.go:42: 17:20:25 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ which gdate logger.go:42: 17:20:25 | demand-backup/1-create-cluster | 
which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ which date logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ date=/usr/sbin/date logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ which gsed logger.go:42: 17:20:25 | demand-backup/1-create-cluster | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++++ which sed logger.go:42: 17:20:25 | demand-backup/1-create-cluster | +++ sed=/usr/sbin/sed logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ++ oc get projects logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + get_cr demand-backup 3819 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + local cr_name=demand-backup logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + '[' -z demand-backup ']' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + yq '.metadata.finalizers=["percona.com/delete-backups"]' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + local repo_path=3819 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + local source_path= logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + yq eval ' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .metadata.name = "demand-backup" | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .metadata.labels = {"e2e":"demand-backup"} | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.postgresVersion = 18 | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.users += [{"name":"postgres","password":{"type":"AlphaNumeric"}}] | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.users += [{"name":"demand-backup","password":{"type":"AlphaNumeric"}}] | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.image = "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.initContainer.image = "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.image = "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.proxy.pgBouncer.image = "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.pmm.image = "docker.io/perconalab/pmm-client:dev-latest" | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.pmm.secret = "demand-backup-pmm-secret" | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.pmm.customClusterName = "demand-backup-pmm-custom-name" | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.pmm.postgresParams = "--environment=dev-postgres" logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ' /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy/cr.yaml logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + yq '.spec.backups.pgbackrest.global.log-level-console="debug"' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + yq '.spec.backups.pgbackrest.global.repo1-retention-full="2"' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + yq 
'.spec.backups.pgbackrest.global.repo1-retention-full-type="count"' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + yq '.spec.backups.pgbackrest.global.repo3-retention-full="2"' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + yq '.spec.backups.pgbackrest.global.repo3-retention-full-type="count"' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + yq .spec.backups.pgbackrest.jobs.backoffLimit=20 logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + kubectl -n kuttl-test-enough-midge apply -f - logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + yq '.spec.backups.pgbackrest.jobs.restartPolicy="OnFailure"' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + [[ -n '' ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + case $test_name in logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + yq eval -i ' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.configuration = [{"secret":{"name":"demand-backup-pgbackrest-secrets"}}] | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.manual.repoName = "repo1" | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.manual.options = ["--type=full"] | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.global.repo1-path = "/backrestrepo/postgres-operator/3819/repo1" | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.repos = [{"name":"repo1","s3":{"bucket":"pg-operator-testing","endpoint":"s3.amazonaws.com","region":"us-east-1"}}] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ' /tmp/kuttl/pg/demand-backup/cr.yaml logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + [[ demand-backup == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + yq eval -i ' logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.global.repo3-path = "/backrestrepo/postgres-operator/3819/repo3" | logger.go:42: 17:20:25 | demand-backup/1-create-cluster | .spec.backups.pgbackrest.repos += [{"name":"repo3","azure":{"container":"pg-operator-testing"}}] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | ' /tmp/kuttl/pg/demand-backup/cr.yaml logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + [[ demand-backup == \s\t\a\r\t\-\f\r\o\m\-\b\a\c\k\u\p ]] logger.go:42: 17:20:25 | demand-backup/1-create-cluster | + cat /tmp/kuttl/pg/demand-backup/cr.yaml logger.go:42: 17:20:26 | demand-backup/1-create-cluster | perconapgcluster.pgv2.percona.com/demand-backup created logger.go:42: 17:22:19 | demand-backup/1-create-cluster | test step completed 1-create-cluster logger.go:42: 17:22:19 | demand-backup/2-write-data | starting test step 2-write-data logger.go:42: 17:22:19 | demand-backup/2-write-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_psql_local \ 'CREATE DATABASE myapp; \c myapp \\\ CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);' \ "postgres:$(get_psql_user_pass demand-backup-pguser-postgres)@$(get_psql_user_host demand-backup-pguser-postgres)" run_psql_local \ '\c myapp \\\ INSERT INTO myApp (id) VALUES (100500)' \ "postgres:$(get_psql_user_pass demand-backup-pguser-postgres)@$(get_psql_user_host demand-backup-pguser-postgres)"] logger.go:42: 17:22:19 | demand-backup/2-write-data | + source ../../functions logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ realpath ../../.. 
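Step 1-create-cluster assembles the custom resource by chaining yq edits over deploy/cr.yaml and then applying the working copy at /tmp/kuttl/pg/demand-backup/cr.yaml. The backup-related edits are spread across several piped and in-place yq calls in the trace; condensed into a single in-place edit they amount to the sketch below (repo paths, bucket, container and secret names are copied from the log; collapsing everything into one yq eval -i is an editorial simplification, not the suite's exact get_cr code):

    CR=/tmp/kuttl/pg/demand-backup/cr.yaml   # working copy produced by get_cr in the trace
    yq eval -i '
      .metadata.finalizers = ["percona.com/delete-backups"] |
      .spec.backups.pgbackrest.global.log-level-console = "debug" |
      .spec.backups.pgbackrest.global.repo1-retention-full = "2" |
      .spec.backups.pgbackrest.global.repo1-retention-full-type = "count" |
      .spec.backups.pgbackrest.global.repo3-retention-full = "2" |
      .spec.backups.pgbackrest.global.repo3-retention-full-type = "count" |
      .spec.backups.pgbackrest.jobs.backoffLimit = 20 |
      .spec.backups.pgbackrest.jobs.restartPolicy = "OnFailure" |
      .spec.backups.pgbackrest.configuration = [{"secret":{"name":"demand-backup-pgbackrest-secrets"}}] |
      .spec.backups.pgbackrest.manual.repoName = "repo1" |
      .spec.backups.pgbackrest.manual.options = ["--type=full"] |
      .spec.backups.pgbackrest.global.repo1-path = "/backrestrepo/postgres-operator/3819/repo1" |
      .spec.backups.pgbackrest.repos = [{"name":"repo1","s3":{"bucket":"pg-operator-testing","endpoint":"s3.amazonaws.com","region":"us-east-1"}}] |
      .spec.backups.pgbackrest.global.repo3-path = "/backrestrepo/postgres-operator/3819/repo3" |
      .spec.backups.pgbackrest.repos += [{"name":"repo3","azure":{"container":"pg-operator-testing"}}]
    ' "$CR"
    kubectl -n kuttl-test-enough-midge apply -f "$CR"

The apply yields perconapgcluster.pgv2.percona.com/demand-backup created, and the step is reported complete roughly two minutes later, presumably once the step's assert sees the cluster ready.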
logger.go:42: 17:22:19 | demand-backup/2-write-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:22:19 | demand-backup/2-write-data | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ pwd logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/tests/demand-backup logger.go:42: 17:22:19 | demand-backup/2-write-data | ++ test_name=demand-backup logger.go:42: 17:22:19 | demand-backup/2-write-data | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/vars.sh logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export GIT_BRANCH=PR-1233 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ GIT_BRANCH=PR-1233 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export VERSION=PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ VERSION=PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ SKIP_TEST_WARNINGS=true logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ command -v oc logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ ! 
-n '' ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ -n '' ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export PG_VER=18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ PG_VER=18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export BUCKET=pg-operator-testing logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ BUCKET=pg-operator-testing logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export PGOV1_TAG=1.4.0 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ PGOV1_TAG=1.4.0 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export PGOV1_VER=14 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ PGOV1_VER=14 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export MINIO_VER=5.4.0 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ MINIO_VER=5.4.0 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export REGISTRY_NAME=docker.io logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ REGISTRY_NAME=docker.io logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ printenv logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ grep -E '^IMAGE' logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ awk -F= '{print $1}' logger.go:42: 
17:22:19 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 17:22:19 | demand-backup/2-write-data | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:19 | demand-backup/2-write-data | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 17:22:19 | demand-backup/2-write-data | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE' logger.go:42: 17:22:19 | demand-backup/2-write-data | +++++ 
echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ var_value=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == docker.io/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == percona/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == perconalab/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 17:22:19 | demand-backup/2-write-data | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:19 | demand-backup/2-write-data | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 17:22:19 | demand-backup/2-write-data | +++++ echo 
perconalab/percona-postgresql-operator logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:22:19 | demand-backup/2-write-data | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 17:22:19 | demand-backup/2-write-data | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 17:22:19 | demand-backup/2-write-data | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ 
perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:19 | demand-backup/2-write-data | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 17:22:19 | demand-backup/2-write-data | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:19 | demand-backup/2-write-data | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 17:22:19 | demand-backup/2-write-data | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 
17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 17:22:19 | demand-backup/2-write-data | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ which gdate logger.go:42: 17:22:19 | demand-backup/2-write-data | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ which date logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ date=/usr/sbin/date logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ which gsed logger.go:42: 17:22:19 | demand-backup/2-write-data | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:22:19 | demand-backup/2-write-data | ++++ which sed logger.go:42: 17:22:19 | demand-backup/2-write-data | +++ sed=/usr/sbin/sed logger.go:42: 
17:22:19 | demand-backup/2-write-data | ++ oc get projects logger.go:42: 17:22:19 | demand-backup/2-write-data | ++ get_psql_user_pass demand-backup-pguser-postgres logger.go:42: 17:22:19 | demand-backup/2-write-data | ++ local secret_name=demand-backup-pguser-postgres logger.go:42: 17:22:19 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-enough-midge get secret/demand-backup-pguser-postgres '--template={{.data.password | base64decode}}' logger.go:42: 17:22:20 | demand-backup/2-write-data | ++ get_psql_user_host demand-backup-pguser-postgres logger.go:42: 17:22:20 | demand-backup/2-write-data | ++ local secret_name=demand-backup-pguser-postgres logger.go:42: 17:22:20 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-enough-midge get secret/demand-backup-pguser-postgres '--template={{.data.host | base64decode }}' logger.go:42: 17:22:20 | demand-backup/2-write-data | + run_psql_local 'CREATE DATABASE myapp; \c myapp \\\ CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);' postgres:NLjIhaVCgSibxoucsdhBxaJ6@demand-backup-primary.kuttl-test-enough-midge.svc logger.go:42: 17:22:20 | demand-backup/2-write-data | + local 'command=CREATE DATABASE myapp; \c myapp \\\ CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);' logger.go:42: 17:22:20 | demand-backup/2-write-data | + local uri=postgres:NLjIhaVCgSibxoucsdhBxaJ6@demand-backup-primary.kuttl-test-enough-midge.svc logger.go:42: 17:22:20 | demand-backup/2-write-data | + local driver=postgres logger.go:42: 17:22:20 | demand-backup/2-write-data | ++ get_client_pod logger.go:42: 17:22:20 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-enough-midge get pods --selector=name=pg-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:22:21 | demand-backup/2-write-data | + kubectl -n kuttl-test-enough-midge exec pg-client-65d98588cc-4nm8s -- bash -c 'printf '\''CREATE DATABASE myapp; \c myapp \\\ CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);\n'\'' | psql -v ON_ERROR_STOP=1 -t -q postgres://'\''postgres:NLjIhaVCgSibxoucsdhBxaJ6@demand-backup-primary.kuttl-test-enough-midge.svc'\''' logger.go:42: 17:22:22 | demand-backup/2-write-data | ++ get_psql_user_pass demand-backup-pguser-postgres logger.go:42: 17:22:22 | demand-backup/2-write-data | ++ local secret_name=demand-backup-pguser-postgres logger.go:42: 17:22:22 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-enough-midge get secret/demand-backup-pguser-postgres '--template={{.data.password | base64decode}}' logger.go:42: 17:22:23 | demand-backup/2-write-data | ++ get_psql_user_host demand-backup-pguser-postgres logger.go:42: 17:22:23 | demand-backup/2-write-data | ++ local secret_name=demand-backup-pguser-postgres logger.go:42: 17:22:23 | demand-backup/2-write-data | ++ kubectl -n kuttl-test-enough-midge get secret/demand-backup-pguser-postgres '--template={{.data.host | base64decode }}' logger.go:42: 17:22:23 | demand-backup/2-write-data | + run_psql_local '\c myapp \\\ INSERT INTO myApp (id) VALUES (100500)' postgres:NLjIhaVCgSibxoucsdhBxaJ6@demand-backup-primary.kuttl-test-enough-midge.svc logger.go:42: 17:22:23 | demand-backup/2-write-data | + local 'command=\c myapp \\\ INSERT INTO myApp (id) VALUES (100500)' logger.go:42: 17:22:23 | demand-backup/2-write-data | + local uri=postgres:NLjIhaVCgSibxoucsdhBxaJ6@demand-backup-primary.kuttl-test-enough-midge.svc logger.go:42: 17:22:23 | demand-backup/2-write-data | + local driver=postgres logger.go:42: 17:22:23 | demand-backup/2-write-data | ++ get_client_pod logger.go:42: 17:22:23 | 
demand-backup/2-write-data | ++ kubectl -n kuttl-test-enough-midge get pods --selector=name=pg-client -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:22:24 | demand-backup/2-write-data | + kubectl -n kuttl-test-enough-midge exec pg-client-65d98588cc-4nm8s -- bash -c 'printf '\''\c myapp \\\ INSERT INTO myApp (id) VALUES (100500)\n'\'' | psql -v ON_ERROR_STOP=1 -t -q postgres://'\''postgres:NLjIhaVCgSibxoucsdhBxaJ6@demand-backup-primary.kuttl-test-enough-midge.svc'\''' logger.go:42: 17:22:24 | demand-backup/2-write-data | test step completed 2-write-data logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | starting test step 3-read-from-primary logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | running command: [sh -c set -o errexit set -o xtrace source ../../functions data=$(run_psql_local '\c myapp \\\ SELECT * from myApp;' "postgres:$(get_psql_user_pass demand-backup-pguser-postgres)@$(get_psql_user_host demand-backup-pguser-postgres)") kubectl create configmap -n "${NAMESPACE}" 03-read-from-primary --from-literal=data="${data}"] logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | + source ../../functions logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | +++ realpath ../../.. logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | ++++ pwd logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/tests/demand-backup logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | ++ test_name=demand-backup logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/vars.sh logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:22:24 | demand-backup/3-read-from-primary | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export GIT_BRANCH=PR-1233 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ GIT_BRANCH=PR-1233 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ 
export VERSION=PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ VERSION=PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ SKIP_TEST_WARNINGS=true logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ command -v oc logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ ! -n '' ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ -n '' ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export PG_VER=18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ PG_VER=18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export BUCKET=pg-operator-testing logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ BUCKET=pg-operator-testing logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:22:25 | 
demand-backup/3-read-from-primary | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export PGOV1_TAG=1.4.0 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ PGOV1_TAG=1.4.0 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export PGOV1_VER=14 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ PGOV1_VER=14 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export MINIO_VER=5.4.0 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ MINIO_VER=5.4.0 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export REGISTRY_NAME=docker.io logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ REGISTRY_NAME=docker.io logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ printenv logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ grep -E '^IMAGE' logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ awk -F= '{print $1}' logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 
17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE' logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ var_value=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == docker.io/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == percona/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == perconalab/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:25 | 
demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 17:22:25 | 
demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ 
var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++++ echo perconalab/pmm-client:dev-latest 
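Steps 2-write-data and 3-read-from-primary both go through run_psql_local, whose moving parts the exec invocations in step 2 above (and again at the end of this step) make visible: the postgres password and host come from the demand-backup-pguser-postgres secret, the client pod is located by its name=pg-client label, and the SQL runs inside that pod. A sketch of the same round trip; for brevity it connects straight to the myapp database with psql -c instead of piping a printf'd \c script through bash -c as the suite does:

    ns=kuttl-test-enough-midge
    secret=demand-backup-pguser-postgres
    pass=$(kubectl -n "$ns" get "secret/$secret" --template='{{.data.password | base64decode}}')
    host=$(kubectl -n "$ns" get "secret/$secret" --template='{{.data.host | base64decode}}')
    client=$(kubectl -n "$ns" get pods --selector=name=pg-client -o 'jsonpath={.items[].metadata.name}')
    # Equivalent of run_psql_local '\c myapp \\\ SELECT * from myApp;' "postgres:$pass@$host"
    kubectl -n "$ns" exec "$client" -- \
        psql -v ON_ERROR_STOP=1 -t -q "postgres://postgres:${pass}@${host}/myapp" \
        -c 'SELECT * FROM myApp;'

Step 3 captures that output (here " 100500") and stores it in the 03-read-from-primary ConfigMap, which the step's assert can then compare against the expected value.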
logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ which gdate logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ which date logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ date=/usr/sbin/date logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ which gsed logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++++ which sed logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ sed=/usr/sbin/sed logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | ++ oc get projects logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ get_psql_user_pass demand-backup-pguser-postgres logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ local secret_name=demand-backup-pguser-postgres logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ kubectl -n kuttl-test-enough-midge get secret/demand-backup-pguser-postgres '--template={{.data.password | base64decode}}' logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ get_psql_user_host demand-backup-pguser-postgres logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ local secret_name=demand-backup-pguser-postgres logger.go:42: 17:22:25 | demand-backup/3-read-from-primary | +++ kubectl -n kuttl-test-enough-midge get secret/demand-backup-pguser-postgres '--template={{.data.host | base64decode }}' logger.go:42: 17:22:26 | demand-backup/3-read-from-primary | ++ run_psql_local '\c myapp \\\ SELECT * from myApp;' postgres:NLjIhaVCgSibxoucsdhBxaJ6@demand-backup-primary.kuttl-test-enough-midge.svc logger.go:42: 17:22:26 | demand-backup/3-read-from-primary | ++ local 'command=\c myapp \\\ SELECT * from myApp;' logger.go:42: 17:22:26 | 
demand-backup/3-read-from-primary | ++ local uri=postgres:NLjIhaVCgSibxoucsdhBxaJ6@demand-backup-primary.kuttl-test-enough-midge.svc
logger.go:42: 17:22:26 | demand-backup/3-read-from-primary | ++ local driver=postgres
logger.go:42: 17:22:26 | demand-backup/3-read-from-primary | +++ get_client_pod
logger.go:42: 17:22:26 | demand-backup/3-read-from-primary | +++ kubectl -n kuttl-test-enough-midge get pods --selector=name=pg-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 17:22:26 | demand-backup/3-read-from-primary | ++ kubectl -n kuttl-test-enough-midge exec pg-client-65d98588cc-4nm8s -- bash -c 'printf '\''\c myapp \\\ SELECT * from myApp;\n'\'' | psql -v ON_ERROR_STOP=1 -t -q postgres://'\''postgres:NLjIhaVCgSibxoucsdhBxaJ6@demand-backup-primary.kuttl-test-enough-midge.svc'\'''
logger.go:42: 17:22:27 | demand-backup/3-read-from-primary | + data=' 100500'
logger.go:42: 17:22:27 | demand-backup/3-read-from-primary | + kubectl create configmap -n kuttl-test-enough-midge 03-read-from-primary '--from-literal=data= 100500'
logger.go:42: 17:22:28 | demand-backup/3-read-from-primary | configmap/03-read-from-primary created
logger.go:42: 17:22:28 | demand-backup/3-read-from-primary | test step completed 3-read-from-primary
logger.go:42: 17:22:28 | demand-backup/4-create-backup-s3 | starting test step 4-create-backup-s3
logger.go:42: 17:22:29 | demand-backup/4-create-backup-s3 | PerconaPGBackup:kuttl-test-enough-midge/demand-backup-full-s3 created
logger.go:42: 17:25:55 | demand-backup/4-create-backup-s3 | test step completed 4-create-backup-s3
logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | starting test step 5-check-pgbackrest-info-s3
logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | running command: [sh -c set -o errexit
set -o xtrace
source ../../functions
instance=$(kubectl get -n "${NAMESPACE}" pod -l postgres-operator.crunchydata.com/instance-set=instance1 -o 'jsonpath={.items[].metadata.name}')
pgbackrest_info_backups=$(kubectl exec -n "${NAMESPACE}" "$instance" -c database -- pgbackrest info --output json --log-level-console=info | jq '.[0].backup[]')
check_backup() {
    local backup_name=$1
    local pgbackrest_annotation=$2
    local pgbackrest_annotation_value=$3
    status_backup_name=$(kubectl get -n "${NAMESPACE}" pg-backup "$backup_name" -o jsonpath='{.status.backupName}')
    if [[ -z $status_backup_name ]]; then
        echo ".status.backupName is empty in $backup_name"
        exit 1
    fi
    backup_info=$(echo "$pgbackrest_info_backups" | jq "select(.annotation.\"$pgbackrest_annotation\" == \"$pgbackrest_annotation_value\")")
    if [[ $status_backup_name != $(echo "$backup_info" | jq ".label" --raw-output) ]]; then
        echo ".status.backupName doesn't equal to label in pgbackrest info"
        exit 1
    fi
    backup_job_name=$(kubectl get -n "${NAMESPACE}" pg-backup "$backup_name" -o jsonpath='{.status.jobName}')
    backup_job_annotation=$(echo "$backup_info" | jq '.annotation."percona.com/backup-job-name"' --raw-output)
    if [[ $backup_job_name != "$backup_job_annotation" ]]; then
        echo "Failed to get job name annotation from pgbackrest"
        exit 1
    fi
}
manual_backup_name="demand-backup-full-s3"
check_backup "$manual_backup_name" "percona.com/backup-name" "$manual_backup_name"
replica_backup_name=$(kubectl get -n "${NAMESPACE}" pg-backup -o jsonpath='{.items[?(@.metadata.annotations.pgv2\.percona\.com/pgbackrest-backup-job-type=="replica-create")].metadata.name}')
check_backup "$replica_backup_name" "percona.com/backup-job-type" "replica-create"]
logger.go:42: 17:25:55 |
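The step-5 script above cross-checks each pg-backup (PerconaPGBackup) resource against what pgBackRest itself reports: .status.backupName has to match the label of the backup entry selected by annotation in the pgbackrest info JSON, and .status.jobName has to match the percona.com/backup-job-name annotation on that same entry. A condensed restatement of that helper, assuming NAMESPACE and instance are set as in the step-5 script and that kubectl and jq are available:

# Sketch, not part of the recorded run: the same checks as check_backup above,
# but querying pgbackrest info on each call instead of caching it in a variable.
check_backup_sketch() {
  local backup_name=$1 annotation=$2 value=$3
  local status_name status_job info

  status_name=$(kubectl get -n "$NAMESPACE" pg-backup "$backup_name" -o jsonpath='{.status.backupName}')
  status_job=$(kubectl get -n "$NAMESPACE" pg-backup "$backup_name" -o jsonpath='{.status.jobName}')
  [[ -n $status_name ]] || { echo ".status.backupName is empty in $backup_name"; return 1; }

  # Pick the matching backup entry out of pgbackrest's JSON by annotation key/value.
  info=$(kubectl exec -n "$NAMESPACE" "$instance" -c database -- pgbackrest info --output json |
    jq --arg a "$annotation" --arg v "$value" '.[0].backup[] | select(.annotation[$a] == $v)')

  [[ $status_name == "$(jq -r '.label' <<<"$info")" ]] || { echo "label mismatch for $backup_name"; return 1; }
  [[ $status_job == "$(jq -r '.annotation["percona.com/backup-job-name"]' <<<"$info")" ]] ||
    { echo "job name mismatch for $backup_name"; return 1; }
}

It is invoked the same way as in the trace that follows, for example: check_backup_sketch demand-backup-full-s3 percona.com/backup-name demand-backup-full-s3
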
demand-backup/5-check-pgbackrest-info-s3 | + source ../../functions logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ realpath ../../.. logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ pwd logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/tests/demand-backup logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++ test_name=demand-backup logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/vars.sh logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export GIT_BRANCH=PR-1233 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ GIT_BRANCH=PR-1233 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export VERSION=PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ VERSION=PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ SKIP_TEST_WARNINGS=true logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ command -v oc logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 
logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ ! -n '' ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ -n '' ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export PG_VER=18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ PG_VER=18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export BUCKET=pg-operator-testing logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ BUCKET=pg-operator-testing logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export PGOV1_TAG=1.4.0 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ PGOV1_TAG=1.4.0 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export PGOV1_VER=14 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ PGOV1_VER=14 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export MINIO_VER=5.4.0 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ MINIO_VER=5.4.0 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export REGISTRY_NAME=docker.io 
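The long block that follows (and the matching block in the step-3 trace earlier) is the shared functions script walking every IMAGE* environment variable and prepending REGISTRY_NAME_FULL, so that bare percona/ and perconalab/ references become fully qualified docker.io/ images while anything already qualified is left alone. A standalone sketch of that loop, using a case statement in place of the chained [[ ... == pattern ]] tests seen in the trace:

# Sketch of the IMAGE* normalization traced below; REGISTRY_NAME_FULL comes from vars.sh.
REGISTRY_NAME_FULL=docker.io/

for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}'); do
  var_value=$(eval "echo \$$var")
  case "$var_value" in
    docker.io/*) ;;   # already fully qualified, leave as is
    percona/* | perconalab/*)
      export "$var=${REGISTRY_NAME_FULL}${var_value}"
      echo "$var=${REGISTRY_NAME_FULL}${var_value}"
      ;;
  esac
done
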
logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ REGISTRY_NAME=docker.io logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ printenv logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ grep -E '^IMAGE' logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ awk -F= '{print $1}' logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:25:55 | 
demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE' logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == docker.io/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == percona/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == perconalab/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 17:25:55 | 
demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 
17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:25:55 | 
demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' 
'{print $1}') logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ which gdate logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ which date logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ date=/usr/sbin/date logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ which gsed logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++++ which sed logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | +++ sed=/usr/sbin/sed logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++ oc get projects logger.go:42: 17:25:55 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-enough-midge pod -l postgres-operator.crunchydata.com/instance-set=instance1 -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:25:56 | demand-backup/5-check-pgbackrest-info-s3 | + instance=demand-backup-instance1-jssh-0 logger.go:42: 17:25:56 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl exec -n kuttl-test-enough-midge demand-backup-instance1-jssh-0 -c database -- pgbackrest info --output json --log-level-console=info logger.go:42: 17:25:56 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq '.[0].backup[]' logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | + pgbackrest_info_backups='{ logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | 
"annotation": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-v6ml", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000004", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24244109, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3133622, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3133622 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24244109 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20260217-172055F", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/40341B8", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/6000050" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1771348855, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1771348933 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | } logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-fs62", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 17:25:58 
| demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32370883, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4213087, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4213087 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32370883 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20260217-172235F", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1771348955, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1771349133 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | + manual_backup_name=demand-backup-full-s3 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | + check_backup demand-backup-full-s3 percona.com/backup-name demand-backup-full-s3 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | + local backup_name=demand-backup-full-s3 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | + local pgbackrest_annotation=percona.com/backup-name logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | + local pgbackrest_annotation_value=demand-backup-full-s3 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n 
kuttl-test-enough-midge pg-backup demand-backup-full-s3 -o 'jsonpath={.status.backupName}' logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | + status_backup_name=20260217-172235F logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | + [[ -z 20260217-172235F ]] logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-v6ml", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000004", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24244109, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3133622, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3133622 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24244109 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20260217-172055F", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/40341B8", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/6000050" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1771348855, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1771348933 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 17:25:58 | 
demand-backup/5-check-pgbackrest-info-s3 | } logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-fs62", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32370883, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4213087, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4213087 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32370883 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20260217-172235F", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1771348955, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1771349133 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq 'select(.annotation."percona.com/backup-name" == "demand-backup-full-s3")' logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | + backup_info='{ 
logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-fs62", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32370883, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4213087, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4213087 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32370883 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20260217-172235F", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1771348955, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1771349133 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-fs62", logger.go:42: 17:25:58 | 
demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32370883, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4213087, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4213087 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32370883 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20260217-172235F", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1771348955, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1771349133 logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq .label --raw-output logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | + [[ 20260217-172235F != 20260217-172235F ]] logger.go:42: 17:25:58 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-enough-midge pg-backup demand-backup-full-s3 -o 'jsonpath={.status.jobName}' logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | + backup_job_name=demand-backup-backup-fs62 logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 
17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-fs62", logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32370883, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4213087, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4213087 logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32370883 logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20260217-172235F", logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1771348955, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1771349133 logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq '.annotation."percona.com/backup-job-name"' --raw-output logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | + backup_job_annotation=demand-backup-backup-fs62 logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | + [[ demand-backup-backup-fs62 != 
\d\e\m\a\n\d\-\b\a\c\k\u\p\-\b\a\c\k\u\p\-\f\s\6\2 ]] logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-enough-midge pg-backup -o 'jsonpath={.items[?(@.metadata.annotations.pgv2\.percona\.com/pgbackrest-backup-job-type=="replica-create")].metadata.name}' logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | + replica_backup_name=demand-backup-backup-v6ml-n4ww5 logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | + check_backup demand-backup-backup-v6ml-n4ww5 percona.com/backup-job-type replica-create logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | + local backup_name=demand-backup-backup-v6ml-n4ww5 logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | + local pgbackrest_annotation=percona.com/backup-job-type logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | + local pgbackrest_annotation_value=replica-create logger.go:42: 17:25:59 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-enough-midge pg-backup demand-backup-backup-v6ml-n4ww5 -o 'jsonpath={.status.backupName}' logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | + status_backup_name=20260217-172055F logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | + [[ -z 20260217-172055F ]] logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-v6ml", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000004", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24244109, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3133622, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3133622 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24244109 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 
17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20260217-172055F", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/40341B8", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/6000050" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1771348855, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1771348933 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | } logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-fs62", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000008", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "00000001000000000000000A" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 32370883, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 4213087, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "size": 4213087 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "size": 32370883 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20260217-172235F", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/8000028", logger.go:42: 17:26:00 | 
demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/A000050" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1771348955, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1771349133 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq 'select(.annotation."percona.com/backup-job-type" == "replica-create")' logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | + backup_info='{ logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-v6ml", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000004", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24244109, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3133622, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3133622 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24244109 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20260217-172055F", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/40341B8", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/6000050" logger.go:42: 17:26:00 | 
demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1771348855, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1771348933 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-v6ml", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000004", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24244109, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3133622, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3133622 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24244109 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20260217-172055F", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/40341B8", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/6000050" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 17:26:00 | 
demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1771348855, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1771348933 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq .label --raw-output logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | + [[ 20260217-172055F != 20260217-172055F ]] logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | ++ kubectl get -n kuttl-test-enough-midge pg-backup demand-backup-backup-v6ml-n4ww5 -o 'jsonpath={.status.jobName}' logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | + backup_job_name=demand-backup-backup-v6ml logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | ++ echo '{ logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "annotation": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-name": "demand-backup-backup-v6ml", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "percona.com/backup-job-type": "replica-create" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "archive": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "start": "000000010000000000000004", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "000000010000000000000006" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "backrest": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "format": 5, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "version": "2.57.0" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "database": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "id": 1, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "repo-key": 1 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "error": false, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "info": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 24244109, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "repository": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "delta": 3133622, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "size": 3133622 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "size": 24244109 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "label": "20260217-172055F", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "lsn": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "start": "0/40341B8", logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "stop": "0/6000050" logger.go:42: 17:26:00 | 
demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "prior": null, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "reference": null, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "timestamp": { logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "start": 1771348855, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "stop": 1771348933 logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }, logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | "type": "full" logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | }' logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | ++ jq '.annotation."percona.com/backup-job-name"' --raw-output logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | + backup_job_annotation=demand-backup-backup-v6ml logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | + [[ demand-backup-backup-v6ml != \d\e\m\a\n\d\-\b\a\c\k\u\p\-\b\a\c\k\u\p\-\v\6\m\l ]] logger.go:42: 17:26:00 | demand-backup/5-check-pgbackrest-info-s3 | test step completed 5-check-pgbackrest-info-s3 logger.go:42: 17:26:00 | demand-backup/6-create-backup-azure | starting test step 6-create-backup-azure logger.go:42: 17:26:01 | demand-backup/6-create-backup-azure | PerconaPGBackup:kuttl-test-enough-midge/demand-backup-full-azure created logger.go:42: 17:27:27 | demand-backup/6-create-backup-azure | test step completed 6-create-backup-azure logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | starting test step 7-check-pgbackrest-info-azure logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | running command: [sh -c set -o errexit set -o xtrace source ../../functions instance=$(kubectl get -n "${NAMESPACE}" pod -l postgres-operator.crunchydata.com/instance-set=instance1 -o 'jsonpath={.items[].metadata.name}') pgbackrest_info_backups=$(kubectl exec -n "${NAMESPACE}" "$instance" -c database -- pgbackrest info --output json --log-level-console=info | jq '.[0].backup[]') check_backup() { local backup_name=$1 local pgbackrest_annotation=$2 local pgbackrest_annotation_value=$3 status_backup_name=$(kubectl get -n "${NAMESPACE}" pg-backup "$backup_name" -o jsonpath='{.status.backupName}') if [[ -z $status_backup_name ]]; then echo ".status.backupName is empty in $backup_name" exit 1 fi backup_info=$(echo "$pgbackrest_info_backups" | jq "select(.annotation.\"$pgbackrest_annotation\" == \"$pgbackrest_annotation_value\")") if [[ $status_backup_name != $(echo "$backup_info" | jq ".label" --raw-output) ]]; then echo ".status.backupName doesn't equal to label in pgbackrest info" exit 1 fi backup_job_name=$(kubectl get -n "${NAMESPACE}" pg-backup "$backup_name" -o jsonpath='{.status.jobName}') backup_job_annotation=$(echo "$backup_info" | jq '.annotation."percona.com/backup-job-name"' --raw-output) if [[ $backup_job_name != "$backup_job_annotation" ]]; then echo "Failed to get job name annotation from pgbackrest" exit 1 fi } manual_backup_name="demand-backup-full-azure" check_backup "$manual_backup_name" "percona.com/backup-name" "$manual_backup_name" replica_backup_name=$(kubectl get -n "${NAMESPACE}" pg-backup -o jsonpath='{.items[?(@.metadata.annotations.pgv2\.percona\.com/pgbackrest-backup-job-type=="replica-create")].metadata.name}') check_backup "$replica_backup_name" "percona.com/backup-job-type" "replica-create"] logger.go:42: 17:27:27 | 
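The step-5 and step-7 scripts shown in this trace run the same check_backup routine against different repositories: the label recorded in the PerconaPGBackup status must match the label pgbackrest reports for the entry carrying the matching annotation, and the job name in the status must match the percona.com/backup-job-name annotation. A condensed, standalone sketch of that check follows; it assumes kubectl and jq are on PATH and the kubeconfig points at the test cluster, and it reuses the namespace and backup name from this run purely as placeholders.

# Condensed, manual version of the check_backup logic traced above
# (assumption: kubectl/jq available, kubeconfig pointing at the test cluster).
ns=kuttl-test-enough-midge          # namespace created for this run
backup=demand-backup-full-azure     # any PerconaPGBackup (pg-backup) name

# First instance-set pod, same label selector the test uses.
pod=$(kubectl get -n "$ns" pod \
  -l postgres-operator.crunchydata.com/instance-set=instance1 \
  -o jsonpath='{.items[0].metadata.name}')

# Fields the operator records on the pg-backup resource.
label=$(kubectl get -n "$ns" pg-backup "$backup" -o jsonpath='{.status.backupName}')
job=$(kubectl get -n "$ns" pg-backup "$backup" -o jsonpath='{.status.jobName}')

# Matching entry from pgbackrest's own catalog, selected by the backup-name annotation.
info=$(kubectl exec -n "$ns" "$pod" -c database -- \
  pgbackrest info --output json --log-level-console=info |
  jq --arg b "$backup" '.[0].backup[] | select(.annotation."percona.com/backup-name" == $b)')

# The CR and pgbackrest must agree on both the label and the job name.
[[ $label == "$(jq -r .label <<<"$info")" ]] || echo "label mismatch"
[[ $job == "$(jq -r '.annotation."percona.com/backup-job-name"' <<<"$info")" ]] || echo "job name mismatch"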
demand-backup/7-check-pgbackrest-info-azure | + source ../../functions logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ realpath ../../.. logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ pwd logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/tests/demand-backup logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++ test_name=demand-backup logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/vars.sh logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export GIT_BRANCH=PR-1233 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ GIT_BRANCH=PR-1233 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export VERSION=PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ VERSION=PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ SKIP_TEST_WARNINGS=true logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ command -v oc logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | 
demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ ! -n '' ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ -n '' ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export PG_VER=18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ PG_VER=18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export BUCKET=pg-operator-testing logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ BUCKET=pg-operator-testing logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export PGOV1_TAG=1.4.0 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ PGOV1_TAG=1.4.0 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export PGOV1_VER=14 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ PGOV1_VER=14 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export 
MINIO_VER=5.4.0 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ MINIO_VER=5.4.0 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export REGISTRY_NAME=docker.io logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ REGISTRY_NAME=docker.io logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ printenv logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ grep -E '^IMAGE' logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ awk -F= '{print $1}' logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] 
logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE' logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == docker.io/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == percona/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == perconalab/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ 
var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') 
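The repeated `for var in $(printenv | grep -E '^IMAGE' ...)` fragments traced above and below come from vars.sh normalizing every IMAGE* variable to a fully qualified reference. A minimal sketch of that loop, reconstructed from the xtrace output only (REGISTRY_NAME_FULL is docker.io/ in this run; the exact vars.sh source may differ):

# Sketch of the image-normalization loop seen in this trace.
for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}'); do
  var_value=$(eval echo "\$$var")    # current value of e.g. IMAGE_BACKREST
  # Values already pinned to docker.io/ are left untouched; bare percona/ or
  # perconalab/ references get the registry prefix prepended.
  if [[ $var_value != docker.io/* && ( $var_value == percona/* || $var_value == perconalab/* ) ]]; then
    new_value="${REGISTRY_NAME_FULL}${var_value}"   # REGISTRY_NAME_FULL=docker.io/
    export "$var=$new_value"
    echo "$var=$new_value"
  fi
done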
logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 
17:27:27 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | 
+++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ which gdate logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ which date logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ date=/usr/sbin/date logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ which gsed logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++++ which sed logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | +++ sed=/usr/sbin/sed logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++ oc get projects logger.go:42: 17:27:27 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-enough-midge pod -l 
postgres-operator.crunchydata.com/instance-set=instance1 -o 'jsonpath={.items[].metadata.name}' logger.go:42: 17:27:28 | demand-backup/7-check-pgbackrest-info-azure | + instance=demand-backup-instance1-jssh-0 logger.go:42: 17:27:28 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl exec -n kuttl-test-enough-midge demand-backup-instance1-jssh-0 -c database -- pgbackrest info --output json --log-level-console=info logger.go:42: 17:27:28 | demand-backup/7-check-pgbackrest-info-azure | ++ jq '.[0].backup[]' logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | + pgbackrest_info_backups='{ logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-v6ml", logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000004", logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24244109, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3133622, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "size": 3133622 logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "size": 24244109 logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "label": "20260217-172055F", logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/40341B8", logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/6000050" logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 17:27:30 | 
demand-backup/7-check-pgbackrest-info-azure | "start": 1771348855, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1771348933 logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-fs62", logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000008", logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000A" logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32370883, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4213087, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "size": 4213087 logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "size": 32370883 logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "label": "20260217-172235F", logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/8000028", logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/A000050" logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "start": 1771348955, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1771349133 
logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-d62z", logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32440194, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4213984, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "size": 4213984 logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "size": 32440194 logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "label": "20260217-172608F", logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000190" logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "start": 1771349168, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1771349224 logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" 
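At this point pgbackrest reports three full backups in the trace: the replica-create backup 20260217-172055F and the manual S3 backup 20260217-172235F on repo1, and the manual Azure backup 20260217-172608F on repo3. The step scripts isolate a single entry with a jq select on the Percona annotations; the standalone example below shows the same filter, reusing the pod and namespace names from this run, and projects a few fields for readability (the projected field names are illustrative, not part of the test).

# Example: pick one backup entry out of `pgbackrest info --output json` by its
# Percona annotation (same select() the step scripts use; stanza index 0 assumed).
kubectl exec -n kuttl-test-enough-midge demand-backup-instance1-jssh-0 -c database -- \
  pgbackrest info --output json --log-level-console=info |
  jq '.[0].backup[]
      | select(.annotation."percona.com/backup-name" == "demand-backup-full-azure")
      | {label, "repo-key": .database."repo-key", type, size: .info.size}'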
logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | + manual_backup_name=demand-backup-full-azure logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | + check_backup demand-backup-full-azure percona.com/backup-name demand-backup-full-azure logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | + local backup_name=demand-backup-full-azure logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | + local pgbackrest_annotation=percona.com/backup-name logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | + local pgbackrest_annotation_value=demand-backup-full-azure logger.go:42: 17:27:30 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-enough-midge pg-backup demand-backup-full-azure -o 'jsonpath={.status.backupName}' logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | + status_backup_name=20260217-172608F logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | + [[ -z 20260217-172608F ]] logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-v6ml", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000004", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24244109, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3133622, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "size": 3133622 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "size": 24244109 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "label": "20260217-172055F", logger.go:42: 17:27:31 | 
demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/40341B8", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/6000050" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": 1771348855, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1771348933 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-fs62", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000008", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000A" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32370883, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4213087, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "size": 4213087 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "size": 32370883 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "label": "20260217-172235F", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/8000028", logger.go:42: 
17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/A000050" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": 1771348955, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1771349133 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-d62z", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32440194, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4213984, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "size": 4213984 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "size": 32440194 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "label": "20260217-172608F", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000190" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, 
logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": 1771349168, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1771349224 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | ++ jq 'select(.annotation."percona.com/backup-name" == "demand-backup-full-azure")' logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | + backup_info='{ logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-d62z", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32440194, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4213984, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "size": 4213984 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "size": 32440194 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "label": "20260217-172608F", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000190" logger.go:42: 17:27:31 | 
demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": 1771349168, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1771349224 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-d62z", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32440194, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4213984, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "size": 4213984 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "size": 32440194 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "label": "20260217-172608F", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000190" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 
17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": 1771349168, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1771349224 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | ++ jq .label --raw-output logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | + [[ 20260217-172608F != 20260217-172608F ]] logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-enough-midge pg-backup demand-backup-full-azure -o 'jsonpath={.status.jobName}' logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | + backup_job_name=demand-backup-backup-d62z logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-d62z", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32440194, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4213984, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "size": 4213984 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "size": 32440194 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "label": "20260217-172608F", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { 
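
Note (not part of the captured log): the sequence above is the check_backup verification for demand-backup-full-azure. As the xtrace shows, it requires that .status.backupName of the pg-backup resource is non-empty and matches the label of the pgBackRest entry selected by the given annotation, and that .status.jobName matches that entry's percona.com/backup-job-name annotation. A condensed sketch of the same checks, reconstructed from the trace rather than copied from the suite's functions file (NAMESPACE and the info JSON are taken as inputs):

    check_backup_sketch() {
        local backup_name=$1 annotation=$2 annotation_value=$3 info_json=$4
        local status_label status_job entry

        # The CR must already carry the pgBackRest label in its status.
        status_label=$(kubectl -n "${NAMESPACE}" get pg-backup "${backup_name}" -o jsonpath='{.status.backupName}')
        [[ -n ${status_label} ]] || return 1

        # Pick the matching entry out of the stream of backup objects.
        entry=$(jq --arg a "${annotation}" --arg v "${annotation_value}" \
            'select(.annotation[$a] == $v)' <<<"${info_json}")
        [[ $(jq -r .label <<<"${entry}") == "${status_label}" ]] || return 1

        # The job recorded in the CR status must match the annotated job name.
        status_job=$(kubectl -n "${NAMESPACE}" get pg-backup "${backup_name}" -o jsonpath='{.status.jobName}')
        [[ $(jq -r '.annotation."percona.com/backup-job-name"' <<<"${entry}") == "${status_job}" ]]
    }
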
logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000190" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "start": 1771349168, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1771349224 logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | ++ jq '.annotation."percona.com/backup-job-name"' --raw-output logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | + backup_job_annotation=demand-backup-backup-d62z logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | + [[ demand-backup-backup-d62z != \d\e\m\a\n\d\-\b\a\c\k\u\p\-\b\a\c\k\u\p\-\d\6\2\z ]] logger.go:42: 17:27:31 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-enough-midge pg-backup -o 'jsonpath={.items[?(@.metadata.annotations.pgv2\.percona\.com/pgbackrest-backup-job-type=="replica-create")].metadata.name}' logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | + replica_backup_name=demand-backup-backup-v6ml-n4ww5 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | + check_backup demand-backup-backup-v6ml-n4ww5 percona.com/backup-job-type replica-create logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | + local backup_name=demand-backup-backup-v6ml-n4ww5 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | + local pgbackrest_annotation=percona.com/backup-job-type logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | + local pgbackrest_annotation_value=replica-create logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-enough-midge pg-backup demand-backup-backup-v6ml-n4ww5 -o 'jsonpath={.status.backupName}' logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | + status_backup_name=20260217-172055F logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | + [[ -z 20260217-172055F ]] logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-v6ml", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000004", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | 
demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24244109, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3133622, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "size": 3133622 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "size": 24244109 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "label": "20260217-172055F", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/40341B8", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/6000050" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "start": 1771348855, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1771348933 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-fs62", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-s3" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000008", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000A" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 
17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32370883, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4213087, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "size": 4213087 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "size": 32370883 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "label": "20260217-172235F", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/8000028", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/A000050" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "start": 1771348955, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1771349133 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | } logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-d62z", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-name": "demand-backup-full-azure" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "start": "00000001000000000000000C", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "stop": "00000001000000000000000C" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, 
logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 3 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "delta": 32440194, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "delta": 4213984, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "size": 4213984 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "size": 32440194 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "label": "20260217-172608F", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/C000028", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/C000190" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "start": 1771349168, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1771349224 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | ++ jq 'select(.annotation."percona.com/backup-job-type" == "replica-create")' logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | + backup_info='{ logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-v6ml", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000004", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, 
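
Note (not part of the captured log): the bootstrap backup has no percona.com/backup-name annotation, so this part of the step finds its pg-backup resource through the pgv2.percona.com/pgbackrest-backup-job-type annotation instead, and then selects the matching pgBackRest entry by percona.com/backup-job-type == "replica-create". In the jsonpath filter the dots inside the annotation key have to be escaped so they are not treated as path separators, which is why the logged command reads:

    kubectl get -n kuttl-test-enough-midge pg-backup \
        -o 'jsonpath={.items[?(@.metadata.annotations.pgv2\.percona\.com/pgbackrest-backup-job-type=="replica-create")].metadata.name}'

An equivalent lookup that avoids the escaping, shown only as a sketch, pipes the list through jq:

    kubectl get -n kuttl-test-enough-midge pg-backup -o json \
        | jq -r '.items[]
                 | select(.metadata.annotations["pgv2.percona.com/pgbackrest-backup-job-type"] == "replica-create")
                 | .metadata.name'
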
logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24244109, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3133622, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "size": 3133622 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "size": 24244109 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "label": "20260217-172055F", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/40341B8", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/6000050" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "start": 1771348855, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1771348933 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-v6ml", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000004", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | 
"id": 1, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24244109, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3133622, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "size": 3133622 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "size": 24244109 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "label": "20260217-172055F", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/40341B8", logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/6000050" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "start": 1771348855, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1771348933 logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | ++ jq .label --raw-output logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | + [[ 20260217-172055F != 20260217-172055F ]] logger.go:42: 17:27:32 | demand-backup/7-check-pgbackrest-info-azure | ++ kubectl get -n kuttl-test-enough-midge pg-backup demand-backup-backup-v6ml-n4ww5 -o 'jsonpath={.status.jobName}' logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | + backup_job_name=demand-backup-backup-v6ml logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | ++ echo '{ logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "annotation": { logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-name": "demand-backup-backup-v6ml", logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "percona.com/backup-job-type": "replica-create" logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "archive": { logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "start": "000000010000000000000004", logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "stop": "000000010000000000000006" logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "backrest": { logger.go:42: 17:27:33 | 
demand-backup/7-check-pgbackrest-info-azure | "format": 5, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "version": "2.57.0" logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "database": { logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "id": 1, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "repo-key": 1 logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "error": false, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "info": { logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "delta": 24244109, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "repository": { logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "delta": 3133622, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "size": 3133622 logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "size": 24244109 logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "label": "20260217-172055F", logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "lsn": { logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "start": "0/40341B8", logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "stop": "0/6000050" logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "prior": null, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "reference": null, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "timestamp": { logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "start": 1771348855, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "stop": 1771348933 logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | }, logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | "type": "full" logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | }' logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | ++ jq '.annotation."percona.com/backup-job-name"' --raw-output logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | + backup_job_annotation=demand-backup-backup-v6ml logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | + [[ demand-backup-backup-v6ml != \d\e\m\a\n\d\-\b\a\c\k\u\p\-\b\a\c\k\u\p\-\v\6\m\l ]] logger.go:42: 17:27:33 | demand-backup/7-check-pgbackrest-info-azure | test step completed 7-check-pgbackrest-info-azure logger.go:42: 17:27:33 | demand-backup/8-create-second-backup-s3 | starting test step 8-create-second-backup-s3 logger.go:42: 17:27:33 | demand-backup/8-create-second-backup-s3 | PerconaPGBackup:kuttl-test-enough-midge/demand-backup-full-s3-2 created logger.go:42: 17:29:20 | demand-backup/8-create-second-backup-s3 | test step completed 8-create-second-backup-s3 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | starting test step 9-check-retention-s3 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | running command: [sh -c set -o errexit set -o xtrace source 
../../functions retention_count=2 backups_count=$(kubectl -n "${NAMESPACE}" get pg-backup -o yaml | yq '.items | map(select(.metadata.name != "demand-backup-full-azure*")) | length') jobs_count=$(kubectl -n "${NAMESPACE}" get jobs -o yaml | yq '.items | map(select(.metadata.ownerReferences.[].name != "demand-backup-full-azure*")) | length') if [[ $backups_count != $retention_count ]]; then echo "There are $backups_count backups, but our retention is set to $retention_count" exit 1 fi if [[ $jobs_count != $retention_count ]]; then echo "There are $jobs_count jobs, but our retention is set to $retention_count" exit 1 fi] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | + source ../../functions logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ realpath ../../.. logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ pwd logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/tests/demand-backup logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++ test_name=demand-backup logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/vars.sh logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export GIT_BRANCH=PR-1233 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ GIT_BRANCH=PR-1233 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export VERSION=PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ VERSION=PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ SKIP_TEST_WARNINGS=true logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ command -v oc logger.go:42: 17:29:20 | 
demand-backup/9-check-retention-s3 | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ ! -n '' ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ -n '' ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export PG_VER=18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ PG_VER=18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export BUCKET=pg-operator-testing logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ BUCKET=pg-operator-testing logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export PGOV1_TAG=1.4.0 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ PGOV1_TAG=1.4.0 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export PGOV1_VER=14 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 
| +++ PGOV1_VER=14 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export MINIO_VER=5.4.0 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ MINIO_VER=5.4.0 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export REGISTRY_NAME=docker.io logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ REGISTRY_NAME=docker.io logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ printenv logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ grep -E '^IMAGE' logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ awk -F= '{print $1}' logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 17:29:20 | 
demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE' logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == docker.io/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == percona/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == perconalab/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ 
perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] 
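Aside: the xtrace above (repeated verbatim in every step that sources ../../functions) is the loop that normalizes image references before a step runs: any IMAGE* variable whose value is a bare Docker Hub name (percona/* or perconalab/*) gets the explicit ${REGISTRY_NAME_FULL} prefix, producing the IMAGE_*=docker.io/... lines in the log. The snippet below is a minimal reconstruction of that loop from the trace, not the actual e2e-tests/functions source; in particular, the skip branch for values that already start with docker.io/ is an assumption, since no such value occurs in this run.

for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}'); do
    var_value=$(eval "echo \$${var}")                   # e.g. perconalab/pmm-server:3.4
    # assumed: values already prefixed with docker.io/ are left as-is (not exercised in this run)
    if [[ ${var_value} != docker.io/* ]] \
        && { [[ ${var_value} == percona/* ]] || [[ ${var_value} == perconalab/* ]]; }; then
        new_value="${REGISTRY_NAME_FULL}${var_value}"   # docker.io/perconalab/pmm-server:3.4
        export "${var}=${new_value}"
        echo "${var}=${new_value}"                      # the IMAGE_*=docker.io/... lines seen in the log
    fi
done

Pinning the registry explicitly is presumably done so the test images resolve the same way regardless of the node's default registry configuration; in the trace below, the IMAGE_PMM3_SERVER iteration continues with the corresponding new_value assignment and export.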
logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ 
perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 17:29:20 | 
demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ which gdate logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ which date logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ date=/usr/sbin/date logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ which gsed logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++++ which sed logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | +++ sed=/usr/sbin/sed logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++ oc get projects logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | + retention_count=2 logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++ kubectl -n kuttl-test-enough-midge get pg-backup -o yaml logger.go:42: 17:29:20 | demand-backup/9-check-retention-s3 | ++ yq '.items | map(select(.metadata.name != "demand-backup-full-azure*")) | length' logger.go:42: 17:29:21 | demand-backup/9-check-retention-s3 | + backups_count=2 logger.go:42: 17:29:21 | demand-backup/9-check-retention-s3 | ++ kubectl -n kuttl-test-enough-midge get jobs -o yaml logger.go:42: 17:29:21 | demand-backup/9-check-retention-s3 | ++ yq '.items | map(select(.metadata.ownerReferences.[].name != "demand-backup-full-azure*")) | length' logger.go:42: 17:29:21 | demand-backup/9-check-retention-s3 | + jobs_count=2 logger.go:42: 17:29:21 | demand-backup/9-check-retention-s3 | + [[ 2 != 2 ]] logger.go:42: 17:29:21 | demand-backup/9-check-retention-s3 | + [[ 2 != 2 ]] logger.go:42: 17:29:21 | demand-backup/9-check-retention-s3 | test step completed 9-check-retention-s3 logger.go:42: 17:29:21 | demand-backup/10-create-second-backup-azure | starting test step 10-create-second-backup-azure logger.go:42: 17:29:22 | demand-backup/10-create-second-backup-azure | PerconaPGBackup:kuttl-test-enough-midge/demand-backup-full-azure-2 created logger.go:42: 17:30:43 | demand-backup/10-create-second-backup-azure | test step completed 
10-create-second-backup-azure logger.go:42: 17:30:43 | demand-backup/11-create-third-backup-azure | starting test step 11-create-third-backup-azure logger.go:42: 17:30:43 | demand-backup/11-create-third-backup-azure | PerconaPGBackup:kuttl-test-enough-midge/demand-backup-full-azure-3 created logger.go:42: 17:32:51 | demand-backup/11-create-third-backup-azure | test step completed 11-create-third-backup-azure logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | starting test step 12-check-retention-azure logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | running command: [sh -c set -o errexit set -o xtrace source ../../functions retention_count=2 backups_count=$(kubectl -n "${NAMESPACE}" get pg-backup -o yaml | yq '.items | map(select(.metadata.name == "demand-backup-full-azure*")) | length') jobs_count=$(kubectl -n "${NAMESPACE}" get jobs -o yaml | yq '.items | map(select(.metadata.ownerReferences.[].name == "demand-backup-full-azure*")) | length') if [[ $backups_count != $retention_count ]]; then echo "There are $backups_count backups, but our retention is set to $retention_count" exit 1 fi if [[ $jobs_count != $retention_count ]]; then echo "There are $jobs_count jobs, but our retention is set to $retention_count" exit 1 fi] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | + source ../../functions logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ realpath ../../.. logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ pwd logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/tests/demand-backup logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++ test_name=demand-backup logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/vars.sh logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:32:51 | 
demand-backup/12-check-retention-azure | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export GIT_BRANCH=PR-1233 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ GIT_BRANCH=PR-1233 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export VERSION=PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ VERSION=PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ SKIP_TEST_WARNINGS=true logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ command -v oc logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ ! -n '' ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ -n '' ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export PG_VER=18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ PG_VER=18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export BUCKET=pg-operator-testing logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ BUCKET=pg-operator-testing logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:32:51 | 
demand-backup/12-check-retention-azure | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export PGOV1_TAG=1.4.0 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ PGOV1_TAG=1.4.0 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export PGOV1_VER=14 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ PGOV1_VER=14 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export MINIO_VER=5.4.0 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ MINIO_VER=5.4.0 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export REGISTRY_NAME=docker.io logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ REGISTRY_NAME=docker.io logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ printenv logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ grep -E '^IMAGE' logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ awk -F= '{print $1}' logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo 
perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE' logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == docker.io/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == percona/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == perconalab/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | 
demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo 
IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ 
IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 17:32:51 | 
demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ which gdate logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ which date logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ date=/usr/sbin/date logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ which gsed logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++++ 
which sed logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | +++ sed=/usr/sbin/sed logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++ oc get projects logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | + retention_count=2 logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++ kubectl -n kuttl-test-enough-midge get pg-backup -o yaml logger.go:42: 17:32:51 | demand-backup/12-check-retention-azure | ++ yq '.items | map(select(.metadata.name == "demand-backup-full-azure*")) | length' logger.go:42: 17:32:52 | demand-backup/12-check-retention-azure | + backups_count=2 logger.go:42: 17:32:52 | demand-backup/12-check-retention-azure | ++ kubectl -n kuttl-test-enough-midge get jobs -o yaml logger.go:42: 17:32:52 | demand-backup/12-check-retention-azure | ++ yq '.items | map(select(.metadata.ownerReferences.[].name == "demand-backup-full-azure*")) | length' logger.go:42: 17:32:52 | demand-backup/12-check-retention-azure | + jobs_count=2 logger.go:42: 17:32:52 | demand-backup/12-check-retention-azure | + [[ 2 != 2 ]] logger.go:42: 17:32:52 | demand-backup/12-check-retention-azure | + [[ 2 != 2 ]] logger.go:42: 17:32:52 | demand-backup/12-check-retention-azure | test step completed 12-check-retention-azure logger.go:42: 17:32:52 | demand-backup/13-delete-backup | starting test step 13-delete-backup logger.go:42: 17:32:52 | demand-backup/13-delete-backup | running command: [sh -c set -o errexit set -o xtrace source ../../functions kubectl delete pg-backup -n "${NAMESPACE}" demand-backup-full-s3 kubectl delete pg-backup -n "${NAMESPACE}" demand-backup-full-azure-2] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | + source ../../functions logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ realpath ../../.. 
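For reference, the retention check that steps 9-check-retention-s3 and 12-check-retention-azure both run (shown inline in the "running command" lines above) reassembles to the short script below. The only difference between the two steps is the yq predicate: step 9 counts the backups and jobs that are not demand-backup-full-azure* (!=), while step 12 counts only those matching that name pattern (==). This is a tidied copy of the logged azure variant, with NAMESPACE standing in for kuttl-test-enough-midge; it is a readability aid, not a change to what the test ran.

retention_count=2

# Count PerconaPGBackup objects whose name matches the azure backup pattern ...
backups_count=$(kubectl -n "${NAMESPACE}" get pg-backup -o yaml \
    | yq '.items | map(select(.metadata.name == "demand-backup-full-azure*")) | length')

# ... and the backup Jobs owned by those pg-backup objects.
jobs_count=$(kubectl -n "${NAMESPACE}" get jobs -o yaml \
    | yq '.items | map(select(.metadata.ownerReferences.[].name == "demand-backup-full-azure*")) | length')

if [[ $backups_count != $retention_count ]]; then
    echo "There are $backups_count backups, but our retention is set to $retention_count"
    exit 1
fi

if [[ $jobs_count != $retention_count ]]; then
    echo "There are $jobs_count jobs, but our retention is set to $retention_count"
    exit 1
fi

With retention set to 2, creating the second and third azure full backups (steps 10 and 11) is expected to leave exactly two matching pg-backup objects and two backup jobs, which is what the [[ 2 != 2 ]] comparisons in the step-12 trace confirm before step 13-delete-backup proceeds to delete demand-backup-full-s3 and demand-backup-full-azure-2.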
logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ pwd logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/tests/demand-backup logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++ test_name=demand-backup logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/vars.sh logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export GIT_BRANCH=PR-1233 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ GIT_BRANCH=PR-1233 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export VERSION=PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ VERSION=PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ SKIP_TEST_WARNINGS=true logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ command -v oc logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ ! 
-n '' ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ -n '' ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export PG_VER=18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ PG_VER=18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export BUCKET=pg-operator-testing logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ BUCKET=pg-operator-testing logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export PGOV1_TAG=1.4.0 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ PGOV1_TAG=1.4.0 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export PGOV1_VER=14 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ PGOV1_VER=14 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export MINIO_VER=5.4.0 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ MINIO_VER=5.4.0 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export REGISTRY_NAME=docker.io logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ REGISTRY_NAME=docker.io logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ printenv logger.go:42: 17:32:52 | 
demand-backup/13-delete-backup | ++++ grep -E '^IMAGE' logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ awk -F= '{print $1}' logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | 
perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE' logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ var_value=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == docker.io/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == percona/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == perconalab/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:52 | demand-backup/13-delete-backup | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo 
docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:32:52 | demand-backup/13-delete-backup | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | 
awk -F'=' '{print $1}') logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:52 | demand-backup/13-delete-backup | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 
logger.go:42: 17:32:52 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ which gdate logger.go:42: 17:32:52 | demand-backup/13-delete-backup | 
which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ which date logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ date=/usr/sbin/date logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ which gsed logger.go:42: 17:32:52 | demand-backup/13-delete-backup | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++++ which sed logger.go:42: 17:32:52 | demand-backup/13-delete-backup | +++ sed=/usr/sbin/sed logger.go:42: 17:32:52 | demand-backup/13-delete-backup | ++ oc get projects logger.go:42: 17:32:52 | demand-backup/13-delete-backup | + kubectl delete pg-backup -n kuttl-test-enough-midge demand-backup-full-s3 logger.go:42: 17:32:53 | demand-backup/13-delete-backup | perconapgbackup.pgv2.percona.com "demand-backup-full-s3" deleted from kuttl-test-enough-midge namespace logger.go:42: 17:32:53 | demand-backup/13-delete-backup | + kubectl delete pg-backup -n kuttl-test-enough-midge demand-backup-full-azure-2 logger.go:42: 17:32:53 | demand-backup/13-delete-backup | perconapgbackup.pgv2.percona.com "demand-backup-full-azure-2" deleted from kuttl-test-enough-midge namespace logger.go:42: 17:32:54 | demand-backup/13-delete-backup | test step completed 13-delete-backup logger.go:42: 17:32:54 | demand-backup/14-recreate-backup-s3 | starting test step 14-recreate-backup-s3 logger.go:42: 17:32:54 | demand-backup/14-recreate-backup-s3 | PerconaPGBackup:kuttl-test-enough-midge/demand-backup-full-s3 created logger.go:42: 17:34:40 | demand-backup/14-recreate-backup-s3 | test step completed 14-recreate-backup-s3 logger.go:42: 17:34:40 | demand-backup/15-recreate-backup-azure | starting test step 15-recreate-backup-azure logger.go:42: 17:34:41 | demand-backup/15-recreate-backup-azure | PerconaPGBackup:kuttl-test-enough-midge/demand-backup-full-azure created logger.go:42: 17:37:09 | demand-backup/15-recreate-backup-azure | test step completed 15-recreate-backup-azure logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | starting test step 16-check-password-leak logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | running command: [sh -c set -o errexit set -o xtrace source ../../functions # Temporarily skipping this check # check_passwords_leak] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | + source ../../functions logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ realpath ../../.. 
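Steps 13-15 above exercise the delete-and-recreate path for on-demand backups: the PerconaPGBackup objects are removed with kubectl, then equivalent objects are created again so the operator takes fresh pgBackRest backups to the S3 and Azure repos. A minimal sketch of that flow outside the kuttl harness, with placeholder manifest file names and an assumed .status.state field, would be:

    # Remove the on-demand backup objects (pg-backup is the short name for
    # perconapgbackup.pgv2.percona.com, as shown in the trace above).
    kubectl delete pg-backup -n kuttl-test-enough-midge demand-backup-full-s3
    kubectl delete pg-backup -n kuttl-test-enough-midge demand-backup-full-azure-2

    # Re-create them by applying equivalent PerconaPGBackup manifests; the file
    # names here are placeholders for whatever YAML steps 14 and 15 apply.
    kubectl apply -n kuttl-test-enough-midge -f demand-backup-full-s3.yaml
    kubectl apply -n kuttl-test-enough-midge -f demand-backup-full-azure.yaml

    # Wait for the operator to finish the new backups (status field assumed).
    kubectl wait pg-backup/demand-backup-full-s3 -n kuttl-test-enough-midge \
      --for=jsonpath='{.status.state}'=Succeeded --timeout=10m

Deleting the pg-backup object removes the custom resource; whether the corresponding data in the repository is also pruned depends on operator configuration (finalizers/retention), not on the delete command alone.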
logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ pwd logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/tests/demand-backup logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++ test_name=demand-backup logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/vars.sh logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export GIT_BRANCH=PR-1233 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ GIT_BRANCH=PR-1233 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export VERSION=PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ VERSION=PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ SKIP_TEST_WARNINGS=true logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ command -v oc logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ ! 
-n '' ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ -n '' ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export PG_VER=18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ PG_VER=18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export BUCKET=pg-operator-testing logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ BUCKET=pg-operator-testing logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export PGOV1_TAG=1.4.0 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ PGOV1_TAG=1.4.0 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export PGOV1_VER=14 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ PGOV1_VER=14 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export MINIO_VER=5.4.0 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ MINIO_VER=5.4.0 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export REGISTRY_NAME=docker.io logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ REGISTRY_NAME=docker.io logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export REGISTRY_NAME_FULL=docker.io/ 
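The long block of xtrace that follows (it already appeared once in step 13, and repeats again in step 17) is vars.sh normalizing every IMAGE* variable so that it carries an explicit registry, using the REGISTRY_NAME_FULL=docker.io/ value exported just above. Reconstructed from the trace alone, not from the actual vars.sh source, the loop is roughly:

    # For every variable whose name starts with IMAGE, prepend the registry
    # unless the value already names one. The three pattern checks mirror the
    # [[ ... == docker.io/* / percona/* / perconalab/* ]] tests in the trace;
    # anything else is left untouched in this sketch.
    for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}'); do
        var_value="${!var}"
        case "$var_value" in
            docker.io/*) new_value="$var_value" ;;
            percona/*|perconalab/*) new_value="${REGISTRY_NAME_FULL}${var_value}" ;;
            *) new_value="$var_value" ;;
        esac
        export "$var=$new_value"
        echo "$var=$new_value"
    done

Only the perconalab/* branch is ever taken in this run, which is why every IMAGE* variable ends up prefixed with docker.io/ in the output.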
logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ printenv logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ grep -E '^IMAGE' logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ awk -F= '{print $1}' logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo 
IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE' logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ var_value=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == percona/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 
17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ 
IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ 
new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 17:37:09 | 
demand-backup/16-check-password-leak | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ which gdate logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ which date logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ date=/usr/sbin/date logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ which gsed logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++++ which sed logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | +++ sed=/usr/sbin/sed logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | ++ oc get projects logger.go:42: 17:37:09 | demand-backup/16-check-password-leak | test step completed 16-check-password-leak logger.go:42: 17:37:09 | demand-backup/17-delete-data | starting test step 17-delete-data logger.go:42: 17:37:09 | demand-backup/17-delete-data | running command: [sh -c set -o errexit set -o xtrace source ../../functions run_psql_local \ '\c myapp \\\ TRUNCATE TABLE myApp' \ "postgres:$(get_psql_user_pass demand-backup-pguser-postgres)@$(get_psql_user_host demand-backup-pguser-postgres)"] logger.go:42: 17:37:09 | demand-backup/17-delete-data | + source ../../functions logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ realpath ../../.. 
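Step 17 deletes the seeded data by truncating the table over psql, using the credentials the operator publishes in the demand-backup-pguser-postgres Secret. run_psql_local, get_psql_user_pass and get_psql_user_host are helpers from e2e-tests/functions; without relying on their exact implementation, the step is roughly equivalent to the following (the Secret key names and the pg-client Deployment are assumptions):

    NS=kuttl-test-enough-midge
    SECRET=demand-backup-pguser-postgres   # user Secret created by the operator

    # Pull the connection details out of the Secret (key names assumed: password, host).
    PGPASS=$(kubectl get secret "$SECRET" -n "$NS" -o jsonpath='{.data.password}' | base64 -d)
    PGHOST=$(kubectl get secret "$SECRET" -n "$NS" -o jsonpath='{.data.host}' | base64 -d)

    # Same statement as the test step: connect to the myapp database and truncate
    # the table, running psql from the client pod the suite deploys.
    kubectl exec -n "$NS" deploy/pg-client -- \
      psql "postgresql://postgres:${PGPASS}@${PGHOST}/myapp" -c 'TRUNCATE TABLE myApp'

Since myApp is unquoted in the SQL, PostgreSQL folds the identifier to lowercase, so the statement targets the myapp table.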
logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++ CERT_MANAGER_VER=1.19.1 logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ pwd logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/tests/demand-backup logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++ test_name=demand-backup logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/vars.sh logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export GIT_BRANCH=PR-1233 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ GIT_BRANCH=PR-1233 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export VERSION=PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ VERSION=PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ SKIP_TEST_WARNINGS=true logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ command -v oc logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ ! 
-n '' ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ -n '' ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export PG_VER=18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ PG_VER=18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export BUCKET=pg-operator-testing logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ BUCKET=pg-operator-testing logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export PGOV1_TAG=1.4.0 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ PGOV1_TAG=1.4.0 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export PGOV1_VER=14 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ PGOV1_VER=14 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export MINIO_VER=5.4.0 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ MINIO_VER=5.4.0 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export REGISTRY_NAME=docker.io logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ REGISTRY_NAME=docker.io logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ printenv logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ grep -E '^IMAGE' logger.go:42: 17:37:09 
| demand-backup/17-delete-data | ++++ awk -F= '{print $1}' logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') 
logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE' logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ var_value=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == percona/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/17-delete-data | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ for var in $(printenv | 
grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/17-delete-data | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ 
var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/17-delete-data | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 
17:37:09 | demand-backup/17-delete-data | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ which gdate logger.go:42: 17:37:09 | demand-backup/17-delete-data | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin) logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ which date logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ date=/usr/sbin/date logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ which gsed logger.go:42: 
17:37:09 | demand-backup/17-delete-data | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++++ which sed
logger.go:42: 17:37:09 | demand-backup/17-delete-data | +++ sed=/usr/sbin/sed
logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++ oc get projects
logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++ get_psql_user_pass demand-backup-pguser-postgres
logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++ local secret_name=demand-backup-pguser-postgres
logger.go:42: 17:37:09 | demand-backup/17-delete-data | ++ kubectl -n kuttl-test-enough-midge get secret/demand-backup-pguser-postgres '--template={{.data.password | base64decode}}'
logger.go:42: 17:37:10 | demand-backup/17-delete-data | ++ get_psql_user_host demand-backup-pguser-postgres
logger.go:42: 17:37:10 | demand-backup/17-delete-data | ++ local secret_name=demand-backup-pguser-postgres
logger.go:42: 17:37:10 | demand-backup/17-delete-data | ++ kubectl -n kuttl-test-enough-midge get secret/demand-backup-pguser-postgres '--template={{.data.host | base64decode }}'
logger.go:42: 17:37:10 | demand-backup/17-delete-data | + run_psql_local '\c myapp \\\ TRUNCATE TABLE myApp' postgres:NLjIhaVCgSibxoucsdhBxaJ6@demand-backup-primary.kuttl-test-enough-midge.svc
logger.go:42: 17:37:10 | demand-backup/17-delete-data | + local 'command=\c myapp \\\ TRUNCATE TABLE myApp'
logger.go:42: 17:37:10 | demand-backup/17-delete-data | + local uri=postgres:NLjIhaVCgSibxoucsdhBxaJ6@demand-backup-primary.kuttl-test-enough-midge.svc
logger.go:42: 17:37:10 | demand-backup/17-delete-data | + local driver=postgres
logger.go:42: 17:37:10 | demand-backup/17-delete-data | ++ get_client_pod
logger.go:42: 17:37:10 | demand-backup/17-delete-data | ++ kubectl -n kuttl-test-enough-midge get pods --selector=name=pg-client -o 'jsonpath={.items[].metadata.name}'
logger.go:42: 17:37:11 | demand-backup/17-delete-data | + kubectl -n kuttl-test-enough-midge exec pg-client-65d98588cc-4nm8s -- bash -c 'printf '\''\c myapp \\\ TRUNCATE TABLE myApp\n'\'' | psql -v ON_ERROR_STOP=1 -t -q postgres://'\''postgres:NLjIhaVCgSibxoucsdhBxaJ6@demand-backup-primary.kuttl-test-enough-midge.svc'\'''
logger.go:42: 17:37:12 | demand-backup/17-delete-data | test step completed 17-delete-data
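The delete-data helper traced above can be summarized as the following shell sketch of run_psql_local (function and argument names are taken from the xtrace; the exact definition lives in the sourced e2e-tests/functions file and may differ in detail):

    # Sketch: run a SQL snippet against the cluster through the pg-client pod.
    # $1 = SQL command, $2 = user:password@host URI, $3 = driver (defaults to postgres).
    run_psql_local() {
        local command="$1"
        local uri="$2"
        local driver="${3:-postgres}"
        local client_pod
        client_pod=$(kubectl -n "${NAMESPACE}" get pods --selector=name=pg-client \
            -o 'jsonpath={.items[].metadata.name}')
        kubectl -n "${NAMESPACE}" exec "${client_pod}" -- bash -c \
            "printf '${command}\n' | psql -v ON_ERROR_STOP=1 -t -q ${driver}://'${uri}'"
    }

Here it is called with the TRUNCATE statement and a postgres URI whose password and host were read from the demand-backup-pguser-postgres secret just above.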
logger.go:42: 17:37:12 | demand-backup/18-create-restore-s3 | starting test step 18-create-restore-s3
logger.go:42: 17:37:12 | demand-backup/18-create-restore-s3 | running command: [sh -c set -o errexit set -o xtrace source ../../functions primary=$(get_pod_by_role demand-backup primary name) latest_full_repo1_backup=$(kubectl -n ${NAMESPACE} exec ${primary} -- pgbackrest info --output json --log-level-console=info | jq '[.[] | .backup[] | select(.type == "full") | select(.database.["repo-key"] == 1)][-1].label') cat <&1 >/dev/null) if [[ $res == *$(echo "No resources found in ${NAMESPACE} namespace.")* ]]; then data=0 fi kubectl create configmap -n "${NAMESPACE}" 25-pg-backup-objects --from-literal=data="${data}"]
logger.go:42: 17:40:53 | demand-backup/25-delete-cluster-with-finalizer | + kubectl delete pg -n kuttl-test-enough-midge demand-backup
logger.go:42: 17:40:54 | demand-backup/25-delete-cluster-with-finalizer | perconapgcluster.pgv2.percona.com "demand-backup" deleted from kuttl-test-enough-midge namespace
logger.go:42: 17:42:43 | demand-backup/25-delete-cluster-with-finalizer | + sleep 15
logger.go:42: 17:42:58 | demand-backup/25-delete-cluster-with-finalizer | + data=1
logger.go:42: 17:42:58 | demand-backup/25-delete-cluster-with-finalizer | ++ kubectl -n kuttl-test-enough-midge get pg-backup
logger.go:42: 17:42:58 | demand-backup/25-delete-cluster-with-finalizer | + res='No resources found in kuttl-test-enough-midge namespace.'
logger.go:42: 17:42:58 | demand-backup/25-delete-cluster-with-finalizer | ++ echo 'No resources found in kuttl-test-enough-midge namespace.'
logger.go:42: 17:42:58 | demand-backup/25-delete-cluster-with-finalizer | + [[ No resources found in kuttl-test-enough-midge namespace. == *No resources found in kuttl-test-enough-midge namespace.* ]]
logger.go:42: 17:42:58 | demand-backup/25-delete-cluster-with-finalizer | + data=0
logger.go:42: 17:42:58 | demand-backup/25-delete-cluster-with-finalizer | + kubectl create configmap -n kuttl-test-enough-midge 25-pg-backup-objects --from-literal=data=0
logger.go:42: 17:42:59 | demand-backup/25-delete-cluster-with-finalizer | configmap/25-pg-backup-objects created
logger.go:42: 17:43:00 | demand-backup/25-delete-cluster-with-finalizer | test step completed 25-delete-cluster-with-finalizer
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | starting test step 99-remove-cluster-gracefully
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | running command: [sh -c set -o errexit set -o xtrace source ../../functions remove_all_finalizers destroy_operator]
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | + source ../../functions
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ realpath ../../..
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++ CERT_MANAGER_VER=1.19.1
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ pwd
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ basename /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/tests/demand-backup
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++ test_name=demand-backup
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++ source /mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/vars.sh
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ ROOT_REPO=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/deploy
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ TESTS_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ TESTS_CONFIG_DIR=/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/e2e-tests/conf
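Behind the xtrace noise, the 25-delete-cluster-with-finalizer step that completed above reduces to a small check: delete the PerconaPGCluster and verify that its finalizer also cleaned up the pg-backup objects. A sketch reconstructed from the trace (the wait logic between the delete and the check is not visible in this excerpt, and the exact step script may differ):

    # Sketch: after deleting the cluster, record whether any pg-backup objects remain.
    kubectl delete pg -n "${NAMESPACE}" demand-backup
    sleep 15
    data=1
    res=$(kubectl -n "${NAMESPACE}" get pg-backup 2>&1 >/dev/null)
    if [[ $res == *"No resources found in ${NAMESPACE} namespace."* ]]; then
        data=0    # nothing left behind: the finalizer removed the backups
    fi
    kubectl create configmap -n "${NAMESPACE}" 25-pg-backup-objects --from-literal=data="${data}"

The data flag stored in the 25-pg-backup-objects configmap is presumably what the step's assertion checks.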
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ TEMP_DIR=/tmp/kuttl/pg/demand-backup logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ git rev-parse --abbrev-ref HEAD logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export GIT_BRANCH=PR-1233 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ GIT_BRANCH=PR-1233 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export VERSION=PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ VERSION=PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export SKIP_TEST_WARNINGS=true logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ SKIP_TEST_WARNINGS=true logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ command -v oc logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_BASE=perconalab/percona-postgresql-operator logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ ! -n '' ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ -n '' ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export PG_VER=18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ PG_VER=18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PGBOUNCER=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_POSTGRESQL=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_BACKREST=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_UPGRADE=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export BUCKET=pg-operator-testing logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ BUCKET=pg-operator-testing logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ PMM_SERVER_VERSION=9.9.9 logger.go:42: 17:43:00 | 
demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM_CLIENT=perconalab/pmm-client:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM_SERVER=perconalab/pmm-server:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM3_CLIENT=perconalab/pmm-client:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM3_SERVER=perconalab/pmm-server:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export PGOV1_TAG=1.4.0 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ PGOV1_TAG=1.4.0 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export PGOV1_VER=14 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ PGOV1_VER=14 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export MINIO_VER=5.4.0 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ MINIO_VER=5.4.0 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export REGISTRY_NAME=docker.io logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ REGISTRY_NAME=docker.io logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ REGISTRY_NAME_FULL=docker.io/ logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ printenv logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ grep -E '^IMAGE' logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ awk -F= '{print $1}' logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_BACKREST' logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == docker.io/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == percona/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-pgbackrest18 == perconalab/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:43:00 | 
demand-backup/99-remove-cluster-gracefully | +++ IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | IMAGE_BACKREST=docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_PMM3_CLIENT' logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/pmm-client:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/pmm-client:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-client:3.4 == docker.io/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-client:3.4 == percona/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-client:3.4 == perconalab/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | IMAGE_PMM3_CLIENT=docker.io/perconalab/pmm-client:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE' logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == docker.io/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == percona/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 == perconalab/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ 
new_value=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | IMAGE=docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_UPGRADE' logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == docker.io/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == percona/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-upgrade == perconalab/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | IMAGE_UPGRADE=docker.io/perconalab/percona-postgresql-operator:main-upgrade logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_BASE' logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/percona-postgresql-operator logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/percona-postgresql-operator logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator == 
docker.io/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator == percona/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator == perconalab/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | IMAGE_BASE=docker.io/perconalab/percona-postgresql-operator logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_PMM3_SERVER' logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/pmm-server:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/pmm-server:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-server:3.4 == docker.io/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-server:3.4 == percona/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-server:3.4 == perconalab/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | IMAGE_PMM3_SERVER=docker.io/perconalab/pmm-server:3.4 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_PGBOUNCER' logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:43:00 | 
demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == docker.io/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == percona/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-pgbouncer18 == perconalab/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | IMAGE_PGBOUNCER=docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_POSTGRESQL' logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == docker.io/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == percona/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/percona-postgresql-operator:main-ppg18-postgres == perconalab/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | IMAGE_POSTGRESQL=docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | 
docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_PMM_SERVER' logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/pmm-server:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/pmm-server:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-server:dev-latest == docker.io/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-server:dev-latest == percona/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-server:dev-latest == perconalab/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | IMAGE_PMM_SERVER=docker.io/perconalab/pmm-server:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084 logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}') logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ eval 'echo $IMAGE_PMM_CLIENT' logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++++ echo perconalab/pmm-client:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ var_value=perconalab/pmm-client:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-client:dev-latest == docker.io/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-client:dev-latest == percona/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ [[ perconalab/pmm-client:dev-latest == perconalab/* ]] logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ new_value=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ export IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | IMAGE_PMM_CLIENT=docker.io/perconalab/pmm-client:dev-latest logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ echo 
docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ which gdate
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | which: no gdate in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ which date
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ date=/usr/sbin/date
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ which gsed
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | which: no gsed in (/mnt/jenkins/workspace/cloud-pg-operator_PR-1233/bin/:/root/.krew/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/var/lib/snapd/snap/bin)
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++++ which sed
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | +++ sed=/usr/sbin/sed
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | ++ oc get projects
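As in the earlier test steps, the preamble above normalizes every IMAGE* variable so the reference carries an explicit registry. The loop the xtrace keeps repeating amounts to the following sketch (REGISTRY_NAME_FULL is docker.io/ in this run; the handling of images that already start with docker.io/ is an assumption, since that case never occurs in this log):

    # Sketch: re-export every IMAGE* variable with an explicit docker.io/ prefix.
    for var in $(printenv | grep -E '^IMAGE' | awk -F'=' '{print $1}'); do
        var_value=$(eval "echo \$${var}")
        if [[ $var_value == docker.io/* || $var_value == percona/* || $var_value == perconalab/* ]]; then
            # strip any existing docker.io/ prefix (assumption) before re-prefixing
            new_value="${REGISTRY_NAME_FULL}${var_value#docker.io/}"
            export "${var}=${new_value}"
            echo "${var}=${new_value}"
        fi
    done

This is why the images pulled later in the events list (pgBackRest, pgBouncer, PostgreSQL) show up as docker.io/perconalab/... even though vars.sh defines them without a registry.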
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | + remove_all_finalizers
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | + resource_types=("pg-restore" "pg-backup" "pg")
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | + for resource in "${resource_types[@]}"
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | + echo 'removing all finalizers for pg-restore resources'
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | removing all finalizers for pg-restore resources
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-enough-midge get pg-restore -o json
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | + jq '.items[] | .metadata.name' -r
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | + IFS=
logger.go:42: 17:43:00 | demand-backup/99-remove-cluster-gracefully | + read -r name
logger.go:42: 17:43:01 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-enough-midge delete pg-restore demand-backup-restore --wait=0
logger.go:42: 17:43:01 | demand-backup/99-remove-cluster-gracefully | perconapgrestore.pgv2.percona.com "demand-backup-restore" deleted from kuttl-test-enough-midge namespace
logger.go:42: 17:43:01 | demand-backup/99-remove-cluster-gracefully | ++ kubectl -n kuttl-test-enough-midge get pg-restore demand-backup-restore -o yaml
logger.go:42: 17:43:01 | demand-backup/99-remove-cluster-gracefully | ++ yq '.metadata.finalizers | length'
logger.go:42: 17:43:02 | demand-backup/99-remove-cluster-gracefully | + [[ 1 == \0 ]]
logger.go:42: 17:43:02 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-enough-midge patch pg-restore demand-backup-restore --type=json '-p=[{"op": "remove", "path": "/metadata/finalizers"}]'
logger.go:42: 17:43:02 | demand-backup/99-remove-cluster-gracefully | perconapgrestore.pgv2.percona.com/demand-backup-restore patched
logger.go:42: 17:43:02 | demand-backup/99-remove-cluster-gracefully | + IFS=
logger.go:42: 17:43:02 | demand-backup/99-remove-cluster-gracefully | + read -r name
logger.go:42: 17:43:02 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-enough-midge delete pg-restore demand-backup-restore-azure --wait=0
logger.go:42: 17:43:03 | demand-backup/99-remove-cluster-gracefully | perconapgrestore.pgv2.percona.com "demand-backup-restore-azure" deleted from kuttl-test-enough-midge namespace
logger.go:42: 17:43:03 | demand-backup/99-remove-cluster-gracefully | ++ kubectl -n kuttl-test-enough-midge get pg-restore demand-backup-restore-azure -o yaml
logger.go:42: 17:43:03 | demand-backup/99-remove-cluster-gracefully | ++ yq '.metadata.finalizers | length'
logger.go:42: 17:43:03 | demand-backup/99-remove-cluster-gracefully | + [[ 1 == \0 ]]
logger.go:42: 17:43:03 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-enough-midge patch pg-restore demand-backup-restore-azure --type=json '-p=[{"op": "remove", "path": "/metadata/finalizers"}]'
logger.go:42: 17:43:04 | demand-backup/99-remove-cluster-gracefully | perconapgrestore.pgv2.percona.com/demand-backup-restore-azure patched
logger.go:42: 17:43:04 | demand-backup/99-remove-cluster-gracefully | + IFS=
logger.go:42: 17:43:04 | demand-backup/99-remove-cluster-gracefully | + read -r name
logger.go:42: 17:43:04 | demand-backup/99-remove-cluster-gracefully | + for resource in "${resource_types[@]}"
logger.go:42: 17:43:04 | demand-backup/99-remove-cluster-gracefully | + echo 'removing all finalizers for pg-backup resources'
logger.go:42: 17:43:04 | demand-backup/99-remove-cluster-gracefully | removing all finalizers for pg-backup resources
logger.go:42: 17:43:04 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-enough-midge get pg-backup -o json
logger.go:42: 17:43:04 | demand-backup/99-remove-cluster-gracefully | + jq '.items[] | .metadata.name' -r
logger.go:42: 17:43:04 | demand-backup/99-remove-cluster-gracefully | + IFS=
logger.go:42: 17:43:04 | demand-backup/99-remove-cluster-gracefully | + read -r name
logger.go:42: 17:43:05 | demand-backup/99-remove-cluster-gracefully | + for resource in "${resource_types[@]}"
logger.go:42: 17:43:05 | demand-backup/99-remove-cluster-gracefully | + echo 'removing all finalizers for pg resources'
logger.go:42: 17:43:05 | demand-backup/99-remove-cluster-gracefully | removing all finalizers for pg resources
logger.go:42: 17:43:05 | demand-backup/99-remove-cluster-gracefully | + kubectl -n kuttl-test-enough-midge get pg -o json
logger.go:42: 17:43:05 | demand-backup/99-remove-cluster-gracefully | + jq '.items[] | .metadata.name' -r
logger.go:42: 17:43:05 | demand-backup/99-remove-cluster-gracefully | + IFS=
logger.go:42: 17:43:05 | demand-backup/99-remove-cluster-gracefully | + read -r name
logger.go:42: 17:43:05 | demand-backup/99-remove-cluster-gracefully | + destroy_operator
logger.go:42: 17:43:05 | demand-backup/99-remove-cluster-gracefully | + kubectl -n pg-operator delete deployment percona-postgresql-operator --force --grace-period=0
logger.go:42: 17:43:05 | demand-backup/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
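remove_all_finalizers, as traced above, walks the pg-restore, pg-backup and pg custom resources, deletes each object without waiting, and strips any finalizers that would otherwise block deletion. Condensed into a sketch (names and flags taken from the trace; the real helper lives in e2e-tests/functions and may differ):

    # Sketch: delete every object of each type and drop leftover finalizers.
    resource_types=("pg-restore" "pg-backup" "pg")
    for resource in "${resource_types[@]}"; do
        echo "removing all finalizers for ${resource} resources"
        kubectl -n "${NAMESPACE}" get "${resource}" -o json | jq -r '.items[] | .metadata.name' |
            while IFS= read -r name; do
                kubectl -n "${NAMESPACE}" delete "${resource}" "${name}" --wait=0
                if [[ "$(kubectl -n "${NAMESPACE}" get "${resource}" "${name}" -o yaml |
                        yq '.metadata.finalizers | length')" != 0 ]]; then
                    kubectl -n "${NAMESPACE}" patch "${resource}" "${name}" --type=json \
                        -p='[{"op": "remove", "path": "/metadata/finalizers"}]'
                fi
            done
    done

destroy_operator then force-deletes the operator Deployment and the pg-operator namespace, which is what produces the two "Immediate deletion" warnings around this point in the log.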
logger.go:42: 17:43:06 | demand-backup/99-remove-cluster-gracefully | deployment.apps "percona-postgresql-operator" force deleted from pg-operator namespace logger.go:42: 17:43:06 | demand-backup/99-remove-cluster-gracefully | + [[ -n pg-operator ]] logger.go:42: 17:43:06 | demand-backup/99-remove-cluster-gracefully | + kubectl delete namespace pg-operator --force --grace-period=0 logger.go:42: 17:43:06 | demand-backup/99-remove-cluster-gracefully | Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. logger.go:42: 17:43:06 | demand-backup/99-remove-cluster-gracefully | namespace "pg-operator" force deleted logger.go:42: 17:43:18 | demand-backup/99-remove-cluster-gracefully | test step completed 99-remove-cluster-gracefully logger.go:42: 17:43:18 | demand-backup | demand-backup events from ns kuttl-test-enough-midge: logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:23 +0000 UTC Normal Pod pg-client-65d98588cc-4nm8s Binding Scheduled Successfully assigned kuttl-test-enough-midge/pg-client-65d98588cc-4nm8s to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-9gpb default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:23 +0000 UTC Normal Pod pg-client-65d98588cc-4nm8s.spec.containers{pg-client} Pulling Pulling image "perconalab/percona-distribution-postgresql:16" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:23 +0000 UTC Normal ReplicaSet.apps pg-client-65d98588cc SuccessfulCreate Created pod: pg-client-65d98588cc-4nm8s replicaset-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:23 +0000 UTC Normal Deployment.apps pg-client ScalingReplicaSet Scaled up replica set pg-client-65d98588cc from 0 to 1 deployment-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:24 +0000 UTC Normal Pod pg-client-65d98588cc-4nm8s.spec.containers{pg-client} Pulled Successfully pulled image "perconalab/percona-distribution-postgresql:16" in 219ms (219ms including waiting). Image size: 564266549 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:24 +0000 UTC Normal Pod pg-client-65d98588cc-4nm8s.spec.containers{pg-client} Created Created container: pg-client kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:24 +0000 UTC Normal Pod pg-client-65d98588cc-4nm8s.spec.containers{pg-client} Started Started container pg-client kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:26 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-cluster-ca-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-selfsigned logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:26 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-cluster-ca-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-ca logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:26 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-cluster-ca-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-venafi logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:26 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-cluster-ca-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-vault logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:26 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-cluster-ca-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-acme logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:26 +0000 UTC Normal Certificate.cert-manager.io demand-backup-cluster-ca-cert Issuing Issuing certificate as Secret does not exist cert-manager-certificates-trigger logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:26 +0000 UTC Normal Certificate.cert-manager.io demand-backup-cluster-ca-cert Generated Stored new private key in temporary Secret resource "demand-backup-cluster-ca-cert-4z4fn" cert-manager-certificates-key-manager logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:26 +0000 UTC Normal Certificate.cert-manager.io demand-backup-cluster-ca-cert Requested Created new CertificateRequest resource "demand-backup-cluster-ca-cert-1" cert-manager-certificates-request-manager logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:27 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-cluster-ca-cert-1 cert-manager.io Certificate request has been approved by cert-manager.io cert-manager-certificaterequests-approver logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:27 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-cluster-ca-cert-1 CertificateIssued Certificate fetched from issuer successfully cert-manager-certificaterequests-issuer-selfsigned logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:27 +0000 UTC Normal Certificate.cert-manager.io demand-backup-cluster-ca-cert Issuing The certificate has been successfully issued cert-manager-certificates-issuing logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:27 +0000 UTC Normal Certificate.cert-manager.io demand-backup-cluster-cert Issuing Issuing certificate as Secret does not exist cert-manager-certificates-trigger logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:27 +0000 UTC Normal Certificate.cert-manager.io demand-backup-cluster-cert Generated Stored 
new private key in temporary Secret resource "demand-backup-cluster-cert-dhnrx" cert-manager-certificates-key-manager logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:27 +0000 UTC Normal Issuer.cert-manager.io demand-backup-tls-issuer KeyPairVerified Signing CA verified cert-manager-issuers logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-cluster-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-selfsigned logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-cluster-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-acme logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-cluster-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-vault logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-cluster-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-ca logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-cluster-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-venafi logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-cluster-cert-1 cert-manager.io Certificate request has been approved by cert-manager.io cert-manager-certificaterequests-approver logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-cluster-cert-1 CertificateIssued Certificate fetched from issuer successfully cert-manager-certificaterequests-issuer-ca logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal Certificate.cert-manager.io demand-backup-cluster-cert Requested Created new CertificateRequest resource "demand-backup-cluster-cert-1" cert-manager-certificates-request-manager logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal Certificate.cert-manager.io demand-backup-cluster-cert Issuing The certificate has been successfully issued cert-manager-certificates-issuing logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-jssh-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-acme logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-jssh-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-vault logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-jssh-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-venafi logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-jssh-cert-1 WaitingForApproval Not 
signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-ca logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-jssh-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-selfsigned logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-jssh-cert-1 cert-manager.io Certificate request has been approved by cert-manager.io cert-manager-certificaterequests-approver logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal Certificate.cert-manager.io demand-backup-instance1-jssh-cert Issuing Issuing certificate as Secret does not contain a private key cert-manager-certificates-trigger logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal Certificate.cert-manager.io demand-backup-instance1-jssh-cert Generated Stored new private key in temporary Secret resource "demand-backup-instance1-jssh-cert-5cdq7" cert-manager-certificates-key-manager logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal Certificate.cert-manager.io demand-backup-instance1-jssh-cert Requested Created new CertificateRequest resource "demand-backup-instance1-jssh-cert-1" cert-manager-certificates-request-manager logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-jssh-pgdata WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-jssh-pgdata ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-jssh-pgdata Provisioning External provisioner is provisioning volume for claim "kuttl-test-enough-midge/demand-backup-instance1-jssh-pgdata" pd.csi.storage.gke.io_gke-a1d0aa8c220d49c7a322-56d4-2044-vm_381d5f5b-0c40-4bb8-8ecf-8b7414079309 logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-jssh SuccessfulCreate create Pod demand-backup-instance1-jssh-0 in StatefulSet demand-backup-instance1-jssh successful statefulset-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-pwm9-pgdata WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-pwm9-pgdata ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. 
persistentvolume-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-pwm9-pgdata Provisioning External provisioner is provisioning volume for claim "kuttl-test-enough-midge/demand-backup-instance1-pwm9-pgdata" pd.csi.storage.gke.io_gke-a1d0aa8c220d49c7a322-56d4-2044-vm_381d5f5b-0c40-4bb8-8ecf-8b7414079309 logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-pwm9 SuccessfulCreate create Pod demand-backup-instance1-pwm9-0 in StatefulSet demand-backup-instance1-pwm9 successful statefulset-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:28 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-rgqh-pgdata WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:29 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-jssh-cert-1 CertificateIssued Certificate fetched from issuer successfully cert-manager-certificaterequests-issuer-ca logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:29 +0000 UTC Normal Certificate.cert-manager.io demand-backup-instance1-rgqh-cert Issuing Issuing certificate as Secret does not exist cert-manager-certificates-trigger logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:29 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-rgqh-pgdata ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'pd.csi.storage.gke.io' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:29 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-rgqh-pgdata Provisioning External provisioner is provisioning volume for claim "kuttl-test-enough-midge/demand-backup-instance1-rgqh-pgdata" pd.csi.storage.gke.io_gke-a1d0aa8c220d49c7a322-56d4-2044-vm_381d5f5b-0c40-4bb8-8ecf-8b7414079309 logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:29 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-rgqh SuccessfulCreate create Pod demand-backup-instance1-rgqh-0 in StatefulSet demand-backup-instance1-rgqh successful statefulset-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:29 +0000 UTC Normal Pod demand-backup-repo-host-0 Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-repo-host-0 to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-xmmj default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:29 +0000 UTC Normal StatefulSet.apps demand-backup-repo-host SuccessfulCreate create Pod demand-backup-repo-host-0 in StatefulSet demand-backup-repo-host successful statefulset-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:29 +0000 UTC Normal PostgresCluster.postgres-operator.crunchydata.com demand-backup RepoHostCreated created pgBackRest repository host StatefulSet/demand-backup-repo-host postgrescluster-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal Certificate.cert-manager.io demand-backup-instance1-jssh-cert Issuing The certificate has been successfully issued cert-manager-certificates-issuing logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal Certificate.cert-manager.io 
demand-backup-instance1-pwm9-cert Issuing Issuing certificate as Secret does not contain a private key cert-manager-certificates-trigger logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal Certificate.cert-manager.io demand-backup-instance1-rgqh-cert Generated Stored new private key in temporary Secret resource "demand-backup-instance1-rgqh-cert-29ps2" cert-manager-certificates-key-manager logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-877tj Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-pgbouncer-7cd7d68587-877tj to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-xmmj default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-krwmg Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-pgbouncer-7cd7d68587-krwmg to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-2chq default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-krwmg.spec.containers{pgbouncer} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-sm8pt Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-pgbouncer-7cd7d68587-sm8pt to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-9gpb default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal ReplicaSet.apps demand-backup-pgbouncer-7cd7d68587 SuccessfulCreate Created pod: demand-backup-pgbouncer-7cd7d68587-krwmg replicaset-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal ReplicaSet.apps demand-backup-pgbouncer-7cd7d68587 SuccessfulCreate Created pod: demand-backup-pgbouncer-7cd7d68587-sm8pt replicaset-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal ReplicaSet.apps demand-backup-pgbouncer-7cd7d68587 SuccessfulCreate Created pod: demand-backup-pgbouncer-7cd7d68587-877tj replicaset-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal PodDisruptionBudget.policy demand-backup-pgbouncer NoPods No matching pods found controllermanager logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal Deployment.apps demand-backup-pgbouncer ScalingReplicaSet Scaled up replica set demand-backup-pgbouncer-7cd7d68587 from 0 to 3 deployment-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 203ms (203ms including waiting). Image size: 164343043 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:30 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Certificate.cert-manager.io demand-backup-instance1-pwm9-cert Generated Stored new private key in temporary Secret resource "demand-backup-instance1-pwm9-cert-rf6c4" cert-manager-certificates-key-manager logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-rgqh-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-venafi logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-rgqh-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-acme logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Certificate.cert-manager.io demand-backup-instance1-rgqh-cert Requested Created new CertificateRequest resource "demand-backup-instance1-rgqh-cert-1" cert-manager-certificates-request-manager logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-877tj.spec.containers{pgbouncer} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-877tj.spec.containers{pgbouncer} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" in 240ms (240ms including waiting). Image size: 87614503 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-877tj.spec.containers{pgbouncer} Created Created container: pgbouncer kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-877tj.spec.containers{pgbouncer} Started Started container pgbouncer kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-877tj.spec.containers{pgbouncer-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-877tj.spec.containers{pgbouncer-config} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" in 201ms (201ms including waiting). Image size: 87614503 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-877tj.spec.containers{pgbouncer-config} Created Created container: pgbouncer-config kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-krwmg.spec.containers{pgbouncer} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" in 195ms (195ms including waiting). 
Image size: 87614503 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-krwmg.spec.containers{pgbouncer} Created Created container: pgbouncer kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-krwmg.spec.containers{pgbouncer} Started Started container pgbouncer kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-krwmg.spec.containers{pgbouncer-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-krwmg.spec.containers{pgbouncer-config} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" in 147ms (147ms including waiting). Image size: 87614503 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-krwmg.spec.containers{pgbouncer-config} Created Created container: pgbouncer-config kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-krwmg.spec.containers{pgbouncer-config} Started Started container pgbouncer-config kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Certificate.cert-manager.io demand-backup-pgbouncer-cert Issuing Issuing certificate as Secret does not exist cert-manager-certificates-trigger logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Certificate.cert-manager.io demand-backup-pgbouncer-cert Generated Stored new private key in temporary Secret resource "demand-backup-pgbouncer-cert-ltw7k" cert-manager-certificates-key-manager logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 258ms (258ms including waiting). Image size: 164343043 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:31 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest-config} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 244ms (244ms including waiting). Image size: 164343043 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal Pod demand-backup-instance1-jssh-0 Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-instance1-jssh-0 to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-xmmj default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-jssh-pgdata ProvisioningSucceeded Successfully provisioned volume pvc-a4f75cd5-a5c0-4536-a49b-85b31052ee0f pd.csi.storage.gke.io_gke-a1d0aa8c220d49c7a322-56d4-2044-vm_381d5f5b-0c40-4bb8-8ecf-8b7414079309 logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0 Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-instance1-pwm9-0 to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-9gpb default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-pwm9-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-vault logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-pwm9-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-selfsigned logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-pwm9-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-acme logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-pwm9-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-ca logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-pwm9-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-venafi logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal Certificate.cert-manager.io demand-backup-instance1-pwm9-cert Requested Created new CertificateRequest resource "demand-backup-instance1-pwm9-cert-1" cert-manager-certificates-request-manager logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-pwm9-pgdata ProvisioningSucceeded Successfully provisioned volume pvc-7ba78e04-14f3-44e4-8504-125ad51ee1b2 pd.csi.storage.gke.io_gke-a1d0aa8c220d49c7a322-56d4-2044-vm_381d5f5b-0c40-4bb8-8ecf-8b7414079309 logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-rgqh-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-vault logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-rgqh-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-ca logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-rgqh-cert-1 WaitingForApproval Not signing 
CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-selfsigned logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-rgqh-cert-1 cert-manager.io Certificate request has been approved by cert-manager.io cert-manager-certificaterequests-approver logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal PersistentVolumeClaim demand-backup-instance1-rgqh-pgdata ProvisioningSucceeded Successfully provisioned volume pvc-755012cf-6827-4102-805f-3eb44213ceeb pd.csi.storage.gke.io_gke-a1d0aa8c220d49c7a322-56d4-2044-vm_381d5f5b-0c40-4bb8-8ecf-8b7414079309 logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-877tj.spec.containers{pgbouncer-config} Started Started container pgbouncer-config kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-sm8pt.spec.containers{pgbouncer} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-sm8pt.spec.containers{pgbouncer} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" in 164ms (164ms including waiting). Image size: 87614503 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-sm8pt.spec.containers{pgbouncer} Created Created container: pgbouncer kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-sm8pt.spec.containers{pgbouncer} Started Started container pgbouncer kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-sm8pt.spec.containers{pgbouncer-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-sm8pt.spec.containers{pgbouncer-config} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbouncer18" in 196ms (196ms including waiting). Image size: 87614503 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-sm8pt.spec.containers{pgbouncer-config} Created Created container: pgbouncer-config kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-sm8pt.spec.containers{pgbouncer-config} Started Started container pgbouncer-config kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest-config} Created Created container: pgbackrest-config kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:32 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest-config} Started Started container pgbackrest-config kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:33 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-pwm9-cert-1 cert-manager.io Certificate request has been approved by cert-manager.io cert-manager-certificaterequests-approver logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:33 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-pwm9-cert-1 CertificateIssued Certificate fetched from issuer successfully cert-manager-certificaterequests-issuer-ca logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:33 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0 Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-instance1-rgqh-0 to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-2chq default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:33 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-instance1-rgqh-cert-1 CertificateIssued Certificate fetched from issuer successfully cert-manager-certificaterequests-issuer-ca logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:33 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-pgbouncer-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-vault logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:33 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-pgbouncer-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-venafi logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:33 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-pgbouncer-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-selfsigned logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:33 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-pgbouncer-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-acme logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:33 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-pgbouncer-cert-1 WaitingForApproval Not signing CertificateRequest until it is Approved cert-manager-certificaterequests-issuer-ca logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:33 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-pgbouncer-cert-1 cert-manager.io Certificate request has been approved by cert-manager.io cert-manager-certificaterequests-approver logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:33 +0000 UTC Normal Certificate.cert-manager.io 
demand-backup-pgbouncer-cert Requested Created new CertificateRequest resource "demand-backup-pgbouncer-cert-1" cert-manager-certificates-request-manager logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:34 +0000 UTC Normal Certificate.cert-manager.io demand-backup-instance1-pwm9-cert Issuing The certificate has been successfully issued cert-manager-certificates-issuing logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:34 +0000 UTC Normal Certificate.cert-manager.io demand-backup-instance1-rgqh-cert Issuing The certificate has been successfully issued cert-manager-certificates-issuing logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:34 +0000 UTC Normal CertificateRequest.cert-manager.io demand-backup-pgbouncer-cert-1 CertificateIssued Certificate fetched from issuer successfully cert-manager-certificaterequests-issuer-ca logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:35 +0000 UTC Normal Certificate.cert-manager.io demand-backup-pgbouncer-cert Issuing The certificate has been successfully issued cert-manager-certificates-issuing logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:40 +0000 UTC Normal Pod demand-backup-instance1-jssh-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a4f75cd5-a5c0-4536-a49b-85b31052ee0f" attachdetach-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:40 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-7ba78e04-14f3-44e4-8504-125ad51ee1b2" attachdetach-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:40 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-755012cf-6827-4102-805f-3eb44213ceeb" attachdetach-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:41 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:41 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 182ms (182ms including waiting). Image size: 88774917 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:41 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:41 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:42 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:42 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 209ms (209ms including waiting). Image size: 541756423 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:42 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:42 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:42 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:42 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:42 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 245ms (245ms including waiting). Image size: 88774917 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:42 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:42 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:43 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:43 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 199ms (199ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:43 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:43 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:43 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 215ms (215ms including waiting). Image size: 88774917 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:43 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:43 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:43 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:43 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 217ms (217ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:43 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:43 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:44 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:44 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 210ms (210ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:44 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:44 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{database} Started Started container database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:44 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:44 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 201ms (201ms including waiting). Image size: 541756423 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:44 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:44 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:44 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 175ms (175ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:44 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:44 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:44 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:44 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 161ms (161ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:44 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:44 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:45 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:45 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:45 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 250ms (250ms including waiting). Image size: 164343043 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:45 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:45 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:45 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:45 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:45 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 231ms (231ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:45 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:45 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:45 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:45 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 191ms (191ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:45 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:45 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{database} Started Started container database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:45 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:46 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:46 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 203ms (203ms including waiting). Image size: 541756423 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:46 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:46 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{database} Started Started container database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:46 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:46 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 223ms (223ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:46 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:46 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:46 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:46 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 186ms (186ms including waiting). Image size: 164343043 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:46 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:46 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:46 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:47 +0000 UTC Warning Pod demand-backup-instance1-jssh-0.spec.containers{database} Unhealthy Readiness probe failed: kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:47 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 242ms (242ms including waiting). Image size: 541756423 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:47 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:47 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:47 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:47 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 236ms (236ms including waiting). Image size: 164343043 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:47 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:47 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:47 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:48 +0000 UTC Warning Pod demand-backup-instance1-rgqh-0.spec.containers{database} Unhealthy Readiness probe failed: kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:49 +0000 UTC Warning Pod demand-backup-instance1-pwm9-0.spec.containers{database} Unhealthy Readiness probe failed: kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:53 +0000 UTC Normal Pod demand-backup-backup-v6ml-k2jd5 Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-backup-v6ml-k2jd5 to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-2chq default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:53 +0000 UTC Normal Pod demand-backup-backup-v6ml-k2jd5.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:53 +0000 UTC Normal Job.batch demand-backup-backup-v6ml SuccessfulCreate Created pod: demand-backup-backup-v6ml-k2jd5 job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:53 +0000 UTC Normal PostgresCluster.postgres-operator.crunchydata.com demand-backup StanzasCreated pgBackRest stanza creation completed successfully postgrescluster-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:54 +0000 UTC Normal Pod demand-backup-backup-v6ml-k2jd5.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 203ms (203ms including waiting). Image size: 88774917 bytes. 
kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:54 +0000 UTC Normal Pod demand-backup-backup-v6ml-k2jd5.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:54 +0000 UTC Normal Pod demand-backup-backup-v6ml-k2jd5.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:54 +0000 UTC Normal Pod demand-backup-backup-v6ml-k2jd5.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:54 +0000 UTC Normal Pod demand-backup-backup-v6ml-k2jd5.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 185ms (185ms including waiting). Image size: 164343043 bytes. kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:54 +0000 UTC Normal Pod demand-backup-backup-v6ml-k2jd5.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:54 +0000 UTC Normal Pod demand-backup-backup-v6ml-k2jd5.spec.containers{pgbackrest} Started Started container pgbackrest kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:20:57 +0000 UTC Warning PostgresCluster.postgres-operator.crunchydata.com demand-backup UnableToCreateStanzas command terminated with exit code 50:
2026-02-17 17:20:57.678 P00 DEBUG: common/io/socket/common::sckInit: (block: false, keepAlive: true, tcpKeepAliveCount: 0, tcpKeepAliveIdle: 0, tcpKeepAliveInterval: 0)
2026-02-17 17:20:57.678 P00 DEBUG: common/io/socket/common::sckInit: => void
2026-02-17 17:20:57.681 P00 INFO: stanza-create command begin 2.57.0: --exec-id=445-7e6f6ad3 --log-level-console=debug --log-path=/pgdata/pgbackrest/log --pg1-path=/pgdata/pg18 --pg1-port=5432 --pg1-socket-path=/tmp/postgres --repo3-azure-account= --repo3-azure-container=pg-operator-testing --repo3-azure-key= --repo1-path=/backrestrepo/postgres-operator/3819/repo1 --repo3-path=/backrestrepo/postgres-operator/3819/repo3 --repo1-s3-bucket=pg-operator-testing --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --repo3-type=azure --stanza=db
2026-02-17 17:20:57.681 P00 DEBUG: common/lock::lockInit: (path: {"/tmp/pgbackrest"}, execId: {"445-7e6f6ad3"})
2026-02-17 17:20:57.681 P00 DEBUG: storage/posix/storage::storagePosixNew: (path: {"/tmp/pgbackrest"}, param.modeFile: 0000, param.modePath: 0000, param.write: true, param.noSymLink: false, param.pathExpressionFunction: null)
2026-02-17 17:20:57.681 P00 DEBUG: storage/posix/storage::storagePosixNewInternal: (type: posix, path: {"/tmp/pgbackrest"}, modeFile: 0640, modePath: 0750, write: true, pathExpressionFunction: null, pathSync: true, symLink: true)
2026-02-17 17:20:57.681 P00 DEBUG: storage/posix/storage::storagePosixNewInternal: => {type: posix, path: /tmp/pgbackrest, write: true}
2026-02-17 17:20:57.681 P00 DEBUG: storage/posix/storage::storagePosixNew: => {type: posix, path: /tmp/pgbackrest, write: true}
2026-02-17 17:20:57.681 P00 DEBUG: common/lock::lockInit: => void
2026-02-17 17:20:57.681 P00 DEBUG: command/lock::cmdLockAcquire: (param.returnOnNoLock: false)
2026-02-17 17:20:57.682 P00 DEBUG: command/exit::exitSafe: (result: 0, error: true, signalType: 0)
2026-02-17 17:20:57.682 P00 ERROR: [050]: unable to acquire lock on file '/tmp/pgbackrest/db-backup-1.lock': Resource temporarily unavailable
HINT: is another pgBackRest process running?
--------------------------------------------------------------------
If SUBMITTING AN ISSUE please provide the following information:
version: 2.57.0
command: stanza-create
options: --exec-id=445-7e6f6ad3 --log-level-console=debug --log-path=/pgdata/pgbackrest/log --pg1-path=/pgdata/pg18 --pg1-port=5432 --pg1-socket-path=/tmp/postgres --repo3-azure-account= --repo3-azure-container=pg-operator-testing --repo3-azure-key= --repo1-path=/backrestrepo/postgres-operator/3819/repo1 --repo3-path=/backrestrepo/postgres-operator/3819/repo3 --repo1-s3-bucket=pg-operator-testing --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --repo3-type=azure --stanza=db
stack trace:
common/lock.c:lockAcquire:254:(trace log level required for parameters)
command/lock.c:cmdLockAcquire:(param.returnOnNoLock: false)
config/load.c:cfgLoad:(debug log level required for parameters)
main.c:main:(debug log level required for parameters)
--------------------------------------------------------------------
2026-02-17 17:20:57.682 P00 INFO: stanza-create command end: aborted with exception [050]
2026-02-17 17:20:57.682 P00 DEBUG: command/exit::exitSafe: => 50
2026-02-17 17:20:57.682 P00 DEBUG: main::main: => 50
2026-02-17 17:20:57.690 P00 DEBUG: common/io/socket/common::sckInit: (block: false, keepAlive: true, tcpKeepAliveCount: 0, tcpKeepAliveIdle: 0, tcpKeepAliveInterval: 0)
2026-02-17 17:20:57.690 P00 DEBUG: common/io/socket/common::sckInit: => void
2026-02-17 17:20:57.692 P00 INFO: stanza-upgrade command begin 2.57.0: --exec-id=446-e547b091 --log-level-console=debug --log-path=/pgdata/pgbackrest/log --pg1-path=/pgdata/pg18 --pg1-port=5432 --pg1-socket-path=/tmp/postgres --repo3-azure-account= --repo3-azure-container=pg-operator-testing --repo3-azure-key= --repo1-path=/backrestrepo/postgres-operator/3819/repo1 --repo3-path=/backrestrepo/postgres-operator/3819/repo3 --repo1-s3-bucket=pg-operator-testing --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --repo3-type=azure --stanza=db
2026-02-17 17:20:57.693 P00 DEBUG: common/lock::lockInit: (path: {"/tmp/pgbackrest"}, execId: {"446-e547b091"})
2026-02-17 17:20:57.693 P00 DEBUG: storage/posix/storage::storagePosixNew: (path: {"/tmp/pgbackrest"}, param.modeFile: 0000, param.modePath: 0000, param.write: true, param.noSymLink: false, param.pathExpressionFunction: null)
2026-02-17 17:20:57.693 P00 DEBUG: storage/posix/storage::storagePosixNewInternal: (type: posix, path: {"/tmp/pgbackrest"}, modeFile: 0640, modePath: 0750, write: true, pathExpressionFunction: null, pathSync: true, symLink: true)
2026-02-17 17:20:57.693 P00 DEBUG: storage/posix/storage::storagePosixNewInternal: => {type: posix, path: /tmp/pgbackrest, write: true}
2026-02-17 17:20:57.693 P00 DEBUG: storage/posix/storage::storagePosixNew: => {type: posix, path: /tmp/pgbackrest, write: true}
2026-02-17 17:20:57.693 P00 DEBUG: common/lock::lockInit: => void
2026-02-17 17:20:57.693 P00 DEBUG: command/lock::cmdLockAcquire: (param.returnOnNoLock: false)
2026-02-17 17:20:57.693 P00 DEBUG: command/exit::exitSafe: (result: 0, error: true, signalType: 0)
2026-02-17 17:20:57.693 P00 ERROR: [050]: unable to acquire lock on file '/tmp/pgbackrest/db-backup-1.lock': Resource temporarily unavailable
HINT: is another pgBackRest process running?
--------------------------------------------------------------------
If SUBMITTING AN ISSUE please provide the following information:
version: 2.57.0
command: stanza-upgrade
options: --exec-id=446-e547b091 --log-level-console=debug --log-path=/pgdata/pgbackrest/log --pg1-path=/pgdata/pg18 --pg1-port=5432 --pg1-socket-path=/tmp/postgres --repo3-azure-account= --repo3-azure-container=pg-operator-testing --repo3-azure-key= --repo1-path=/backrestrepo/postgres-operator/3819/repo1 --repo3-path=/backrestrepo/postgres-operator/3819/repo3 --repo1-s3-bucket=pg-operator-testing --repo1-s3-endpoint=s3.amazonaws.com --repo1-s3-key= --repo1-s3-key-secret= --repo1-s3-region=us-east-1 --repo1-type=s3 --repo3-type=azure --stanza=db
stack trace:
common/lock.c:lockAcquire:254:(trace log level required for parameters)
command/lock.c:cmdLockAcquire:(param.returnOnNoLock: false)
config/load.c:cfgLoad:(debug log level required for parameters)
main.c:main:(debug log level required for parameters)
--------------------------------------------------------------------
2026-02-17 17:20:57.693 P00 INFO: stanza-upgrade command end: aborted with exception [050]
2026-02-17 17:20:57.693 P00 DEBUG: command/exit::exitSafe: => 50
2026-02-17 17:20:57.693 P00 DEBUG: main::main: => 50 postgrescluster-controller
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:22:18 +0000 UTC Normal Job.batch demand-backup-backup-v6ml Completed Job completed job-controller
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:22:32 +0000 UTC Normal Pod demand-backup-backup-fs62-86np7 Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-backup-fs62-86np7 to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-2chq default-scheduler
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:22:32 +0000 UTC Normal Job.batch demand-backup-backup-fs62 SuccessfulCreate Created pod: demand-backup-backup-fs62-86np7 job-controller
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:22:33 +0000 UTC Normal Pod demand-backup-backup-fs62-86np7.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:22:33 +0000 UTC Normal Pod demand-backup-backup-fs62-86np7.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 244ms (244ms including waiting). Image size: 88774917 bytes. kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:22:33 +0000 UTC Normal Pod demand-backup-backup-fs62-86np7.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:22:33 +0000 UTC Normal Pod demand-backup-backup-fs62-86np7.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:22:34 +0000 UTC Normal Pod demand-backup-backup-fs62-86np7.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:22:35 +0000 UTC Normal Pod demand-backup-backup-fs62-86np7.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 192ms (192ms including waiting). Image size: 164343043 bytes.
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:22:35 +0000 UTC Normal Pod demand-backup-backup-fs62-86np7.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:22:35 +0000 UTC Normal Pod demand-backup-backup-fs62-86np7.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:25:39 +0000 UTC Normal Job.batch demand-backup-backup-fs62 Completed Job completed job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:26:05 +0000 UTC Normal Pod demand-backup-backup-d62z-jmx8t Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-backup-d62z-jmx8t to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-2chq default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:26:05 +0000 UTC Normal Job.batch demand-backup-backup-d62z SuccessfulCreate Created pod: demand-backup-backup-d62z-jmx8t job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:26:06 +0000 UTC Normal Pod demand-backup-backup-d62z-jmx8t.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:26:06 +0000 UTC Normal Pod demand-backup-backup-d62z-jmx8t.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 210ms (210ms including waiting). Image size: 88774917 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:26:06 +0000 UTC Normal Pod demand-backup-backup-d62z-jmx8t.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:26:06 +0000 UTC Normal Pod demand-backup-backup-d62z-jmx8t.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:26:07 +0000 UTC Normal Pod demand-backup-backup-d62z-jmx8t.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:26:07 +0000 UTC Normal Pod demand-backup-backup-d62z-jmx8t.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 222ms (222ms including waiting). Image size: 164343043 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:26:07 +0000 UTC Normal Pod demand-backup-backup-d62z-jmx8t.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:26:07 +0000 UTC Normal Pod demand-backup-backup-d62z-jmx8t.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:27:09 +0000 UTC Normal Job.batch demand-backup-backup-d62z Completed Job completed job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:27:38 +0000 UTC Normal Pod demand-backup-backup-cszj-mw5vz Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-backup-cszj-mw5vz to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-2chq default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:27:38 +0000 UTC Normal Job.batch demand-backup-backup-cszj SuccessfulCreate Created pod: demand-backup-backup-cszj-mw5vz job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:27:39 +0000 UTC Normal Pod demand-backup-backup-cszj-mw5vz.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:27:39 +0000 UTC Normal Pod demand-backup-backup-cszj-mw5vz.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 150ms (150ms including waiting). Image size: 88774917 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:27:39 +0000 UTC Normal Pod demand-backup-backup-cszj-mw5vz.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:27:39 +0000 UTC Normal Pod demand-backup-backup-cszj-mw5vz.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:27:40 +0000 UTC Normal Pod demand-backup-backup-cszj-mw5vz.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:27:40 +0000 UTC Normal Pod demand-backup-backup-cszj-mw5vz.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 171ms (171ms including waiting). Image size: 164343043 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:27:40 +0000 UTC Normal Pod demand-backup-backup-cszj-mw5vz.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:27:40 +0000 UTC Normal Pod demand-backup-backup-cszj-mw5vz.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:29:03 +0000 UTC Normal Job.batch demand-backup-backup-cszj Completed Job completed job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:29:28 +0000 UTC Normal Pod demand-backup-backup-kpjw-4bw2x Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-backup-kpjw-4bw2x to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-2chq default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:29:28 +0000 UTC Normal Pod demand-backup-backup-kpjw-4bw2x.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:29:28 +0000 UTC Normal Job.batch demand-backup-backup-kpjw SuccessfulCreate Created pod: demand-backup-backup-kpjw-4bw2x job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:29:29 +0000 UTC Normal Pod demand-backup-backup-kpjw-4bw2x.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 256ms (256ms including waiting). Image size: 88774917 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:29:29 +0000 UTC Normal Pod demand-backup-backup-kpjw-4bw2x.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:29:29 +0000 UTC Normal Pod demand-backup-backup-kpjw-4bw2x.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:29:29 +0000 UTC Normal Pod demand-backup-backup-kpjw-4bw2x.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:29:30 +0000 UTC Normal Pod demand-backup-backup-kpjw-4bw2x.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 202ms (202ms including waiting). Image size: 164343043 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:29:30 +0000 UTC Normal Pod demand-backup-backup-kpjw-4bw2x.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:29:30 +0000 UTC Normal Pod demand-backup-backup-kpjw-4bw2x.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:30:26 +0000 UTC Normal Job.batch demand-backup-backup-kpjw Completed Job completed job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:30:46 +0000 UTC Normal Pod demand-backup-backup-n8ns-8fhwq Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-backup-n8ns-8fhwq to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-2chq default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:30:46 +0000 UTC Normal Job.batch demand-backup-backup-n8ns SuccessfulCreate Created pod: demand-backup-backup-n8ns-8fhwq job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:30:47 +0000 UTC Normal Pod demand-backup-backup-n8ns-8fhwq.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:30:47 +0000 UTC Normal Pod demand-backup-backup-n8ns-8fhwq.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 220ms (220ms including waiting). Image size: 88774917 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:30:47 +0000 UTC Normal Pod demand-backup-backup-n8ns-8fhwq.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:30:47 +0000 UTC Normal Pod demand-backup-backup-n8ns-8fhwq.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:30:48 +0000 UTC Normal Pod demand-backup-backup-n8ns-8fhwq.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:30:48 +0000 UTC Normal Pod demand-backup-backup-n8ns-8fhwq.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 147ms (147ms including waiting). Image size: 164343043 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:30:48 +0000 UTC Normal Pod demand-backup-backup-n8ns-8fhwq.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:30:48 +0000 UTC Normal Pod demand-backup-backup-n8ns-8fhwq.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:32:32 +0000 UTC Normal Job.batch demand-backup-backup-n8ns Completed Job completed job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:32:57 +0000 UTC Normal Job.batch demand-backup-backup-g95r SuccessfulCreate Created pod: demand-backup-backup-g95r-t9rlm job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:32:58 +0000 UTC Normal Pod demand-backup-backup-g95r-t9rlm Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-backup-g95r-t9rlm to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-2chq default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:32:58 +0000 UTC Normal Pod demand-backup-backup-g95r-t9rlm.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:32:58 +0000 UTC Normal Pod demand-backup-backup-g95r-t9rlm.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 213ms (213ms including waiting). Image size: 88774917 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:32:58 +0000 UTC Normal Pod demand-backup-backup-g95r-t9rlm.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:32:58 +0000 UTC Normal Pod demand-backup-backup-g95r-t9rlm.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:32:59 +0000 UTC Normal Pod demand-backup-backup-g95r-t9rlm.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:32:59 +0000 UTC Normal Pod demand-backup-backup-g95r-t9rlm.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 182ms (182ms including waiting). Image size: 164343043 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:32:59 +0000 UTC Normal Pod demand-backup-backup-g95r-t9rlm.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:32:59 +0000 UTC Normal Pod demand-backup-backup-g95r-t9rlm.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:34:20 +0000 UTC Normal Job.batch demand-backup-backup-g95r Completed Job completed job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:34:46 +0000 UTC Normal Pod demand-backup-backup-c8gp-94cw7 Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-backup-c8gp-94cw7 to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-2chq default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:34:46 +0000 UTC Normal Pod demand-backup-backup-c8gp-94cw7.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:34:46 +0000 UTC Normal Pod demand-backup-backup-c8gp-94cw7.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 160ms (160ms including waiting). Image size: 88774917 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:34:46 +0000 UTC Normal Pod demand-backup-backup-c8gp-94cw7.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:34:46 +0000 UTC Normal Job.batch demand-backup-backup-c8gp SuccessfulCreate Created pod: demand-backup-backup-c8gp-94cw7 job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:34:47 +0000 UTC Normal Pod demand-backup-backup-c8gp-94cw7.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:34:47 +0000 UTC Normal Pod demand-backup-backup-c8gp-94cw7.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:34:48 +0000 UTC Normal Pod demand-backup-backup-c8gp-94cw7.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 212ms (212ms including waiting). Image size: 164343043 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:34:48 +0000 UTC Normal Pod demand-backup-backup-c8gp-94cw7.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:34:48 +0000 UTC Normal Pod demand-backup-backup-c8gp-94cw7.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:36:50 +0000 UTC Normal Job.batch demand-backup-backup-c8gp Completed Job completed job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:19 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{database} Killing Stopping container database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:19 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest-config} Killing Stopping container pgbackrest-config kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:19 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{replication-cert-copy} Killing Stopping container replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:19 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{database} Killing Stopping container database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:19 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest-config} Killing Stopping container pgbackrest-config kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:19 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{replication-cert-copy} Killing Stopping container replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:19 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{database} Killing Stopping container database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:19 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest-config} Killing Stopping container pgbackrest-config kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:19 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{replication-cert-copy} Killing Stopping container replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:23 +0000 UTC Normal PodDisruptionBudget.policy demand-backup-set-instance1 NoPods No matching pods found controllermanager logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:24 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-tdzrw Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-pgbackrest-restore-tdzrw to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-2chq default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:24 +0000 UTC Warning Pod demand-backup-pgbackrest-restore-tdzrw FailedAttachVolume Multi-Attach error for volume "pvc-a4f75cd5-a5c0-4536-a49b-85b31052ee0f" Volume is already exclusively attached to one node and can't be attached to another attachdetach-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:24 +0000 UTC Normal Job.batch demand-backup-pgbackrest-restore SuccessfulCreate Created pod: demand-backup-pgbackrest-restore-tdzrw job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:43 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-tdzrw SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a4f75cd5-a5c0-4536-a49b-85b31052ee0f" 
attachdetach-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:44 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-tdzrw.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:45 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-tdzrw.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 243ms (243ms including waiting). Image size: 164343043 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:45 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-tdzrw.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:45 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-tdzrw.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:45 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-tdzrw.spec.containers{pgbackrest-restore} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:45 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-tdzrw.spec.containers{pgbackrest-restore} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 235ms (235ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:45 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-tdzrw.spec.containers{pgbackrest-restore} Created Created container: pgbackrest-restore kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:45 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-tdzrw.spec.containers{pgbackrest-restore} Started Started container pgbackrest-restore kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:57 +0000 UTC Normal Job.batch demand-backup-pgbackrest-restore Completed Job completed job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:58 +0000 UTC Normal Pod demand-backup-instance1-jssh-0 Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-instance1-jssh-0 to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-2chq default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:37:58 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-jssh SuccessfulCreate create Pod demand-backup-instance1-jssh-0 in StatefulSet demand-backup-instance1-jssh successful statefulset-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:12 +0000 UTC Normal Pod demand-backup-instance1-jssh-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a4f75cd5-a5c0-4536-a49b-85b31052ee0f" attachdetach-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:13 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:14 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 184ms (184ms including waiting). 
Image size: 88774917 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:14 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:14 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:15 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:15 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 197ms (198ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:15 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:15 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:16 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:16 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 266ms (266ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:16 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:16 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:17 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:17 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 228ms (228ms including waiting). Image size: 541756423 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:17 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:17 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{database} Started Started container database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:17 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:18 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 217ms (217ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:18 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:18 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:18 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:18 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 218ms (218ms including waiting). Image size: 164343043 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:18 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:18 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:18 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:20 +0000 UTC Warning Pod demand-backup-instance1-jssh-0.spec.containers{database} Unhealthy Readiness probe failed: kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:23 +0000 UTC Normal Pod demand-backup-backup-dsj8-x6wcz Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-backup-dsj8-x6wcz to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-9gpb default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:23 +0000 UTC Normal Pod demand-backup-backup-dsj8-x6wcz.spec.initContainers{pgbackrest-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:23 +0000 UTC Normal Job.batch demand-backup-backup-dsj8 SuccessfulCreate Created pod: demand-backup-backup-dsj8-x6wcz job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:24 +0000 UTC Normal Pod demand-backup-backup-dsj8-x6wcz.spec.initContainers{pgbackrest-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 198ms (198ms including waiting). Image size: 88774917 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:24 +0000 UTC Normal Pod demand-backup-backup-dsj8-x6wcz.spec.initContainers{pgbackrest-init} Created Created container: pgbackrest-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:24 +0000 UTC Normal Pod demand-backup-backup-dsj8-x6wcz.spec.initContainers{pgbackrest-init} Started Started container pgbackrest-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:25 +0000 UTC Normal Pod demand-backup-backup-dsj8-x6wcz.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:25 +0000 UTC Normal Pod demand-backup-backup-dsj8-x6wcz.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 220ms (220ms including waiting). Image size: 164343043 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:25 +0000 UTC Normal Pod demand-backup-backup-dsj8-x6wcz.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:25 +0000 UTC Normal Pod demand-backup-backup-dsj8-x6wcz.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:26 +0000 UTC Normal Pod demand-backup-backup-dsj8-x6wcz.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 149ms (149ms including waiting). Image size: 164343043 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:26 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0 Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-instance1-pwm9-0 to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-9gpb default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:26 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-pwm9 SuccessfulCreate create Pod demand-backup-instance1-pwm9-0 in StatefulSet demand-backup-instance1-pwm9 successful statefulset-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:26 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0 Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-instance1-rgqh-0 to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-xmmj default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:26 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-rgqh SuccessfulCreate create Pod demand-backup-instance1-rgqh-0 in StatefulSet demand-backup-instance1-rgqh successful statefulset-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:27 +0000 UTC Warning Pod demand-backup-backup-dsj8-x6wcz.spec.containers{pgbackrest} BackOff Back-off restarting failed container pgbackrest in pod demand-backup-backup-dsj8-x6wcz_kuttl-test-enough-midge(145de9b2-7e17-42b1-bbc7-5faacd19df56) kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:33 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-7ba78e04-14f3-44e4-8504-125ad51ee1b2" attachdetach-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:34 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-755012cf-6827-4102-805f-3eb44213ceeb" attachdetach-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:35 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:35 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 233ms (233ms including waiting). Image size: 88774917 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:35 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:35 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:35 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:35 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 221ms (221ms including waiting). Image size: 88774917 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:35 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:35 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:36 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:36 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 228ms (228ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:36 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:36 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:36 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:37 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:37 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 258ms (258ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:37 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:37 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 258ms (258ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:37 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:37 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:38 +0000 UTC Normal Pod demand-backup-backup-dsj8-x6wcz.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 233ms (233ms including waiting). Image size: 164343043 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:38 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:38 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:38 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 228ms (228ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:38 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:38 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{database} Started Started container database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:38 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:38 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:38 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 241ms (241ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:38 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:38 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:38 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 297ms (297ms including waiting). Image size: 541756423 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 216ms (216ms including waiting). Image size: 164343043 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 199ms (199ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{database} Started Started container database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 228ms (228ms including waiting). Image size: 541756423 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 191ms (191ms including waiting). Image size: 164343043 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:39 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:40 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:40 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:38:41 +0000 UTC Warning Pod demand-backup-instance1-pwm9-0.spec.containers{database} Unhealthy Readiness probe failed: kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:39:06 +0000 UTC Normal Pod demand-backup-backup-dsj8-x6wcz.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 203ms (203ms including waiting). Image size: 164343043 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:39:25 +0000 UTC Normal Job.batch demand-backup-backup-dsj8 Completed Job completed job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:39:46 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cg4ds Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-pgbackrest-restore-cg4ds to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-2chq default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:39:46 +0000 UTC Normal Job.batch demand-backup-pgbackrest-restore SuccessfulCreate Created pod: demand-backup-pgbackrest-restore-cg4ds job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:39:50 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cg4ds.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:39:50 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cg4ds.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 193ms (193ms including waiting). Image size: 164343043 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:39:50 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cg4ds.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:39:50 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cg4ds.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:39:51 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cg4ds.spec.containers{pgbackrest-restore} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:39:51 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cg4ds.spec.containers{pgbackrest-restore} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 217ms (217ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:39:52 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cg4ds.spec.containers{pgbackrest-restore} Created Created container: pgbackrest-restore kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:39:52 +0000 UTC Normal Pod demand-backup-pgbackrest-restore-cg4ds.spec.containers{pgbackrest-restore} Started Started container pgbackrest-restore kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:05 +0000 UTC Normal Pod demand-backup-instance1-jssh-0 Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-instance1-jssh-0 to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-2chq default-scheduler logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:05 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-jssh SuccessfulCreate create Pod demand-backup-instance1-jssh-0 in StatefulSet demand-backup-instance1-jssh successful statefulset-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:05 +0000 UTC Normal Job.batch demand-backup-pgbackrest-restore Completed Job completed job-controller logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:10 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:11 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 209ms (209ms including waiting). Image size: 88774917 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:11 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{database-init} Created Created container: database-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:11 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{database-init} Started Started container database-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:11 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:12 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 239ms (239ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:12 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:12 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:12 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:13 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 157ms (157ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:13 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:13 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:13 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:14 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 222ms (223ms including waiting). Image size: 541756423 bytes. 
kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:14 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{database} Created Created container: database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:14 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{database} Started Started container database kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:14 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:14 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 218ms (218ms including waiting). Image size: 541756423 bytes. kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:14 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:14 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:14 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:14 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 168ms (168ms including waiting). Image size: 164343043 bytes. 
kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:14 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:14 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:14 +0000 UTC Normal Pod demand-backup-instance1-jssh-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:20 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0 Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-instance1-pwm9-0 to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-9gpb default-scheduler
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:20 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-pwm9 SuccessfulCreate create Pod demand-backup-instance1-pwm9-0 in StatefulSet demand-backup-instance1-pwm9 successful statefulset-controller
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:21 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0 Binding Scheduled Successfully assigned kuttl-test-enough-midge/demand-backup-instance1-rgqh-0 to gke-jen-pg-1233-8d8fb208-default-pool-73e907ef-xmmj default-scheduler
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:21 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-rgqh SuccessfulCreate create Pod demand-backup-instance1-rgqh-0 in StatefulSet demand-backup-instance1-rgqh successful statefulset-controller
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:29 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-755012cf-6827-4102-805f-3eb44213ceeb" attachdetach-controller
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:30 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:31 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 216ms (216ms including waiting). Image size: 88774917 bytes. kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:31 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{database-init} Created Created container: database-init kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:31 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{database-init} Started Started container database-init kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:32 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-7ba78e04-14f3-44e4-8504-125ad51ee1b2" attachdetach-controller
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:32 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:32 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 263ms (263ms including waiting). Image size: 541756423 bytes. kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:32 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:32 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:33 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{database-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:33 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{database-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:PR-1233-8d8fb2084" in 218ms (218ms including waiting). Image size: 88774917 bytes. kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:33 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:33 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 240ms (240ms including waiting). Image size: 541756423 bytes. kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:33 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:33 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:34 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{database-init} Created Created container: database-init kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:34 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{database-init} Started Started container database-init kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:34 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{postgres-startup} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:34 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{postgres-startup} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 207ms (207ms including waiting). Image size: 541756423 bytes. kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:34 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{postgres-startup} Created Created container: postgres-startup kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:34 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:34 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 220ms (220ms including waiting). Image size: 541756423 bytes. kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:34 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{database} Created Created container: database kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:34 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{database} Started Started container database kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:34 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:34 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 213ms (213ms including waiting). Image size: 541756423 bytes. kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:34 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:34 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:35 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{postgres-startup} Started Started container postgres-startup kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:35 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{nss-wrapper-init} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:35 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{nss-wrapper-init} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 216ms (217ms including waiting). Image size: 541756423 bytes. kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:35 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{nss-wrapper-init} Created Created container: nss-wrapper-init kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:35 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:35 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 162ms (162ms including waiting). Image size: 164343043 bytes. kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:35 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:35 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:35 +0000 UTC Normal Pod demand-backup-instance1-rgqh-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:36 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.initContainers{nss-wrapper-init} Started Started container nss-wrapper-init kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:36 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{database} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:36 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{database} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 244ms (244ms including waiting). Image size: 541756423 bytes. kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:36 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{database} Created Created container: database kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:37 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{database} Started Started container database kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:37 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{replication-cert-copy} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:37 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{replication-cert-copy} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-ppg18-postgres" in 225ms (225ms including waiting). Image size: 541756423 bytes. kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:37 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{replication-cert-copy} Created Created container: replication-cert-copy kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:37 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{replication-cert-copy} Started Started container replication-cert-copy kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:37 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:37 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest} Pulled Successfully pulled image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" in 211ms (211ms including waiting). Image size: 164343043 bytes. kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:37 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest} Created Created container: pgbackrest kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:37 +0000 UTC Warning Pod demand-backup-instance1-rgqh-0.spec.containers{database} Unhealthy Readiness probe failed: kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:38 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest} Started Started container pgbackrest kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:40:38 +0000 UTC Normal Pod demand-backup-instance1-pwm9-0.spec.containers{pgbackrest-config} Pulling Pulling image "docker.io/perconalab/percona-postgresql-operator:main-pgbackrest18" kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:42:45 +0000 UTC Warning Pod demand-backup-instance1-pwm9-0.spec.containers{database} Unhealthy Readiness probe failed: kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:42:45 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-pwm9 SuccessfulDelete delete Pod demand-backup-instance1-pwm9-0 in StatefulSet demand-backup-instance1-pwm9 successful statefulset-controller
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:42:45 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-rgqh SuccessfulDelete delete Pod demand-backup-instance1-rgqh-0 in StatefulSet demand-backup-instance1-rgqh successful statefulset-controller
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:42:47 +0000 UTC Normal StatefulSet.apps demand-backup-instance1-jssh SuccessfulDelete delete Pod demand-backup-instance1-jssh-0 in StatefulSet demand-backup-instance1-jssh successful statefulset-controller
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:42:48 +0000 UTC Warning Pod demand-backup-instance1-jssh-0.spec.containers{database} Unhealthy Readiness probe failed: kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:42:51 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-877tj.spec.containers{pgbouncer} Killing Stopping container pgbouncer kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:42:51 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-877tj.spec.containers{pgbouncer-config} Killing Stopping container pgbouncer-config kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:42:51 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-krwmg.spec.containers{pgbouncer} Killing Stopping container pgbouncer kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:42:51 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-krwmg.spec.containers{pgbouncer-config} Killing Stopping container pgbouncer-config kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:42:51 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-sm8pt.spec.containers{pgbouncer} Killing Stopping container pgbouncer kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:42:51 +0000 UTC Normal Pod demand-backup-pgbouncer-7cd7d68587-sm8pt.spec.containers{pgbouncer-config} Killing Stopping container pgbouncer-config kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:42:51 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest} Killing Stopping container pgbackrest kubelet
logger.go:42: 17:43:18 | demand-backup | 2026-02-17 17:42:51 +0000 UTC Normal Pod demand-backup-repo-host-0.spec.containers{pgbackrest-config} Killing Stopping container pgbackrest-config kubelet
logger.go:42: 17:43:19 | demand-backup | Deleting namespace "kuttl-test-enough-midge"
=== NAME kuttl
harness.go:404: run tests finished
harness.go:511: cleaning up
harness.go:568: removing temp folder: ""
--- PASS: kuttl (1434.49s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/demand-backup (1433.68s)
PASS